waxstat_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/3/10 14:01
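"""
Daily spider for waxstat.com: for each unprocessed title in the
blowout_record_new_daily_new table, query the site's search-suggestions
endpoint through a proxy, save the previous-day lowest price into
waxstat_lowest_price_record, and mark the source row as processed.
Runs every day at 05:00 via `schedule`.
"""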
import time
import inspect

import requests
import schedule
import user_agent
from loguru import logger
from mysq_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed
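
# Assumed interface of mysq_pool.MySQLConnectionPool, inferred from how it is
# used below (the module itself is not shown here):
#   select_all(sql) -> list of row tuples
#   insert_one(sql, args) -> run a parameterized INSERT
#   update_one(sql, args) -> run a parameterized UPDATE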

url = 'https://www.waxstat.com/waxtracker/search-suggestions'
headers = {
    "accept": "application/json, text/javascript, */*; q=0.01",
    "user-agent": user_agent.generate_user_agent()
}

# One log file per day, rotated at midnight and kept for 7 days
logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")


def after_log(retry_state):
    """
    Retry callback.
    :param retry_state: tenacity RetryCallState object
    """
    # Use the logger passed as the first positional argument, if any;
    # otherwise fall back to the global logger.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build tunnel proxy settings.
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e
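
# Minimal connectivity check for the tunnel proxy (hypothetical snippet, not
# called anywhere in this script; ifconfig.me simply echoes the egress IP):
#   resp = requests.get("https://ifconfig.me", proxies=get_proxys(logger), timeout=10)
#   logger.info(f"Egress IP: {resp.text}")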


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys_(log):
    # Purchased account, North America
    # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
    # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
    http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36928"
    https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36928"
    # url = "https://ifconfig.me"
    try:
        proxySettings = {
            "http": http_proxy,
            "https": https_proxy,
        }
        return proxySettings
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e


def save_data(sql_pool, info):
    """
    Save one record.
    :param sql_pool: MySQL connection pool object
    :param info: tuple of values to insert
    """
    sql = "INSERT INTO waxstat_lowest_price_record (bo_id, title, lowest_price_day_ago, slug) VALUES (%s, %s, %s, %s)"
    sql_pool.insert_one(sql, info)


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_response(log, keywords, sql_pool):
    sql_id = keywords[0]
    keyword = keywords[1]
    params = {
        # "q": "2024 Panini Prizm Baseball Hobby 12 Box Case",
        "q": keyword,
        "page": "0",
        # millisecond timestamp used as a cache-buster
        "_": f"{int(time.time() * 1000) - 555}"
    }
    log.debug(f"Requesting keyword: {keyword}")
    with requests.Session() as session:
        response = session.get(url, headers=headers, params=params, timeout=10, proxies=get_proxys_(log))
        # response = session.get(url, headers=headers, params=params, timeout=10)
        # print(response.text)
        response.raise_for_status()
        resp_json = response.json()
        # Guard against a missing or empty 'data' list, which would otherwise
        # raise an IndexError when indexing [0]
        if resp_json and resp_json.get('data'):
            # if resp_json.get('succeed') == 'true':
            lowest_price_day_ago = resp_json['data'][0].get('price_data', {}).get('lowest_price_day_ago')
            slug = resp_json['data'][0].get('slug')
            info = (sql_id, keyword, lowest_price_day_ago, slug)
            save_data(sql_pool, info)
            sql_pool.update_one("UPDATE blowout_record_new_daily_new SET low_state=1 WHERE id=%s", (sql_id,))
            # else:
            #     log.debug(f"succeed is {resp_json.get('succeed')}")
        else:
            log.debug('Empty response JSON or no suggestion data.........')
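
# Expected shape of the search-suggestions response, inferred from the parsing
# above (other fields are unknown):
# {
#     "data": [
#         {"slug": "...", "price_data": {"lowest_price_day_ago": ...}, ...},
#         ...
#     ]
# }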


@retry(stop=stop_after_attempt(50), wait=wait_fixed(1800), after=after_log)
def waxstat_main(log):
    """
    Main entry point for one crawl run.
    :param log: logger object
    """
    log.info(
        f'Starting spider task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("Failed to connect to the MySQL database")
        raise Exception("Failed to connect to the MySQL database")
    try:
        keyword_list = sql_pool.select_all("SELECT id, title FROM blowout_record_new_daily_new WHERE low_state=0")
        if not keyword_list:
            log.info("No keywords to process; waiting for the next crawl task.........")
            return
        for keywords in keyword_list:
            try:
                get_response(log, keywords, sql_pool)
            except Exception as e:
                log.error(f"Loop keyword:{keywords} fetch failed, error:{e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next crawl round............')


def schedule_task():
    """
    Set up the daily scheduled task.
    """
    # waxstat_main(log=logger)
    schedule.every().day.at("05:00").do(waxstat_main, log=logger)
    while True:
        schedule.run_pending()  # run any job that is due
        time.sleep(1)
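
# To trigger a single crawl immediately instead of waiting for the 05:00 job,
# call waxstat_main(log=logger) directly (see the commented-out line above).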


if __name__ == '__main__':
    schedule_task()