@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date : 2025/3/10 14:01
+import time
+import inspect
+import requests
+import schedule
+import user_agent
+from loguru import logger
+from mysq_pool import MySQLConnectionPool
+from tenacity import retry, stop_after_attempt, wait_fixed
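+# Third-party dependencies: requests, schedule, user_agent, loguru, tenacity;
+# mysq_pool is presumably a local helper module providing MySQLConnectionPool.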
+
+url = 'https://www.waxstat.com/waxtracker/search-suggestions'
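+# This endpoint returns JSON search suggestions; each entry's price_data and
+# slug fields are consumed in get_response below.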
+
+headers = {
+    "accept": "application/json, text/javascript, */*; q=0.01",
+    "user-agent": user_agent.generate_user_agent()
+}
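+# Note: generate_user_agent() runs once at import time, so every request in a
+# given run reuses the same randomized User-Agent string.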
+
+logger.remove()
+logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
+           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
+           level="DEBUG", retention="7 days")
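+# Log files rotate at midnight into ./logs/YYYYMMDD.log and old files are
+# dropped after the 7-day retention window.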
+
+
+def after_log(retry_state):
+    """
+    Retry callback for tenacity.
+    :param retry_state: RetryCallState object
+    """
+    # Use the logger passed as the wrapped function's first positional
+    # argument, if any; otherwise fall back to the global logger.
+    if retry_state.args:
+        log = retry_state.args[0]
+    else:
+        log = logger
+
+    if retry_state.outcome.failed:
+        log.warning(
+            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
+    else:
+        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_proxys(log):
+    """
+    Get the proxy configuration (KDL tunnel).
+    :return: proxies dict for requests
+    """
+    tunnel = "x371.kdltps.com:15818"
+    kdl_username = "t13753103189895"
+    kdl_password = "o0yefv6z"
+    try:
+        proxies = {
+            "http": f"http://{kdl_username}:{kdl_password}@{tunnel}/",
+            "https": f"http://{kdl_username}:{kdl_password}@{tunnel}/"
+        }
+        return proxies
+    except Exception as e:
+        log.error(f"Error getting proxy: {e}")
+        raise e
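+# Note: get_response below calls get_proxys_; this KDL tunnel variant is kept
+# as an alternative proxy source.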
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_proxys_(log):
+    # Purchased account, North America
+    # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36928"
+    https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36928"
+
+    # url = "https://ifconfig.me"
+    try:
+        proxy_settings = {
+            "http": http_proxy,
+            "https": https_proxy,
+        }
+        return proxy_settings
+    except Exception as e:
+        log.error(f"Error getting proxy: {e}")
+        raise e
+
+
+def save_data(sql_pool, info):
+    """
+    Save a record.
+    :param sql_pool: MySQL connection pool object
+    :param info: tuple of values to save
+    """
+    sql = "INSERT INTO waxstat_lowest_price_record (bo_id, title, lowest_price_day_ago, slug) VALUES (%s, %s, %s, %s)"
+    sql_pool.insert_one(sql, info)
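+# insert_one is assumed to bind the %s placeholders as query parameters, so
+# the values in info need no manual escaping here.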
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_response(log, keywords, sql_pool):
+    sql_id = keywords[0]
+    keyword = keywords[1]
+    params = {
+        # "q": "2024 Panini Prizm Baseball Hobby 12 Box Case",
+        "q": keyword,
+        "page": "0",
+        "_": f"{int(time.time() * 1000) - 555}"
+    }
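+    # The "_" parameter mimics jQuery's cache-buster: a millisecond timestamp
+    # (offset slightly here) appended so intermediaries don't serve cached JSON.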
+    log.debug(f"Requesting keyword: {keyword}")
+    with requests.Session() as session:
+        response = session.get(url, headers=headers, params=params, timeout=10, proxies=get_proxys_(log))
+        # response = session.get(url, headers=headers, params=params, timeout=10)
+
+    # print(response.text)
+    response.raise_for_status()
+    resp_json = response.json()
+    if resp_json:
+        # if resp_json.get('succeed') == 'true':
+        data = resp_json.get('data') or []
+        if not data:
+            log.debug(f"No suggestions returned for keyword: {keyword}")
+            return
+        lowest_price_day_ago = data[0].get('price_data', {}).get('lowest_price_day_ago')
+        slug = data[0].get('slug')
+        info = (sql_id, keyword, lowest_price_day_ago, slug)
+        save_data(sql_pool, info)
+        sql_pool.update_one("UPDATE blowout_record_new_daily_new SET low_state=1 WHERE id=%s", (sql_id,))
+
+        # else:
+        #     log.debug(f"succeed is {resp_json.get('succeed')}")
+    else:
+        log.debug('Empty response JSON, nothing to save')
+
+
+@retry(stop=stop_after_attempt(50), wait=wait_fixed(1800), after=after_log)
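+# With stop_after_attempt(50) and wait_fixed(1800), a failing run is retried
+# every 30 minutes, for up to 50 attempts, before the task finally gives up.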
+def waxstat_main(log):
+    """
+    Main entry point.
+    :param log: logger object
+    """
+    log.info(
+        f'Starting the {inspect.currentframe().f_code.co_name} crawler task....................................................')
+
+    # Set up the MySQL connection pool
+    sql_pool = MySQLConnectionPool(log=log)
+    if not sql_pool:
+        log.error("Failed to connect to the MySQL database")
+        raise Exception("Failed to connect to the MySQL database")
+
+    try:
+        keyword_list = sql_pool.select_all("SELECT id, title FROM blowout_record_new_daily_new WHERE low_state=0")
+        if not keyword_list:
+            log.info("No keywords to process, waiting for the next collection task.........")
+            return
+        for keywords in keyword_list:
+            try:
+                get_response(log, keywords, sql_pool)
+            except Exception as e:
+                log.error(f"Keyword {keywords} fetch failed, error: {e}")
+    except Exception as e:
+        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
+    finally:
+        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished, waiting for the next collection round............')
+
+
+def schedule_task():
+    """
+    Set up the scheduled task.
+    """
+    # waxstat_main(log=logger)
+
+    schedule.every().day.at("05:00").do(waxstat_main, log=logger)
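+    # The job fires daily at 05:00 local time; the loop below polls pending
+    # jobs once per second.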
+
+    while True:
+        schedule.run_pending()
+        time.sleep(1)
+
+
+if __name__ == '__main__':
+    schedule_task()