# ags_pop_spider.py
# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.12.10
# Date   : 2026/5/7 13:48
import inspect
import time

import requests
import schedule
import user_agent
from loguru import logger
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool
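
# Dependencies (assumed): requests, schedule, loguru, and tenacity from PyPI;
# user_agent is assumed to be the PyPI "user-agent" package, which exposes
# generate_user_agent(). mysql_pool is a local helper module (not on PyPI)
# providing the MySQLConnectionPool class used below.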
  13. """
  14. 2026/5/7 网站改版 代码重构
  15. """

logger.remove()
logger.add(
    "./logs/{time:YYYYMMDD}.log",
    encoding="utf-8",
    rotation="00:00",    # start a new log file at midnight
    retention="7 days",  # keep one week of logs
    format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
    level="DEBUG",
)


def after_log(retry_state):
    """
    tenacity retry callback: log the outcome of each attempt.
    :param retry_state: a tenacity RetryCallState object
    """
    # Prefer the logger passed as the wrapped function's first argument
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger  # fall back to the global logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the Kuaidaili tunnel proxy configuration.
    :param log: logger object
    :return: proxies dict suitable for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": f"http://{kdl_username}:{kdl_password}@{tunnel}/",
            "https": f"http://{kdl_username}:{kdl_password}@{tunnel}/",
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise
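
# NOTE: get_proxys is defined but never called below; the request in
# get_ags_pop_list goes out directly. To route it through the tunnel, pass the
# dict explicitly, e.g. (hypothetical wiring, not in the original):
#     response = requests.get(url, headers=headers, proxies=get_proxys(log), timeout=10)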


@retry(stop=stop_after_attempt(5), wait=wait_fixed(2), after=after_log)
def get_ags_pop_list(log, sql_pool):
    """
    Fetch the card-category population-report info and store it.
    :param log: logger object
    :param sql_pool: MySQLConnectionPool object
    """
    crawl_date = time.strftime("%Y-%m-%d", time.localtime())
    headers = {
        "accept": "application/json",
        "user-agent": user_agent.generate_user_agent(),
    }
    url = "https://app.agscard.com/api/v2/pop/categories"
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    resp_json = response.json()
    resp_data = resp_json.get("data", [])
    if not resp_data:
        log.error("No data found in the response")
        return
    info_list = []
    for category_ in resp_data:
        category_id = category_.get("id")
        category_name = category_.get("name")
        category_link = f"https://agscard.com/pop/{category_id}"
        category_sets = category_.get("card_sets_count")
        category_cards = category_.get("card_products_count")
        category_graded = category_.get("total_population")
        data_dict = {
            'category_name': category_name,
            'category_link': category_link,
            'category_sets': category_sets,
            'category_cards': category_cards,
            'category_graded': category_graded,
            'crawl_date': crawl_date,
        }
        info_list.append(data_dict)
    # Save the whole batch in one call
    sql_pool.insert_many(table="ags_pop_record", data_list=info_list, ignore=True)
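
# NOTE: insert_many(ignore=True) is assumed to emit an INSERT IGNORE built from
# the dict keys, roughly:
#     INSERT IGNORE INTO ags_pop_record
#         (category_name, category_link, category_sets, category_cards,
#          category_graded, crawl_date)
#     VALUES (%s, %s, %s, %s, %s, %s)
# so re-running on the same crawl_date would not duplicate rows, provided the
# table has a suitable unique key (e.g. on (category_link, crawl_date)).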


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def ags_pop_main(log):
    """
    Main task: if setup fails, retry hourly for up to 100 attempts.
    :param log: logger object
    """
    log.info(f'Starting crawl task {inspect.currentframe().f_code.co_name}....................................................')
    # Configure the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        # Fetch the population report for every card category
        get_ags_pop_list(log, sql_pool)
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next run............')
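
# NOTE: the broad except above swallows crawl errors (including tenacity's
# RetryError once get_ags_pop_list exhausts its 5 attempts), so the hourly
# retry on ags_pop_main only fires when pool construction or the health check
# raises before the try block.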


def schedule_task():
    """
    Entry point for the crawler's scheduled task.
    """
    # Run the task once immediately
    ags_pop_main(log=logger)
    # Then schedule it daily at 00:01
    schedule.every().day.at("00:01").do(ags_pop_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)
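
# NOTE: tenacity retries block the calling thread, so while ags_pop_main sits
# in its wait_fixed(3600) loop, schedule.run_pending() cannot fire the 00:01
# job until the call returns or the 100 attempts are exhausted.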


if __name__ == '__main__':
    schedule_task()
    # ags_pop_main(log=logger)  # manual one-off run