# bgs_new_daily_spider.py
# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.8.10
# Date: 2024-11-12 16:40
import time
import requests
import schedule
import user_agent
from loguru import logger
import concurrent.futures
from tenacity import stop_after_attempt, wait_fixed, retry
from mysq_pool import MySQLConnectionPool

logger.remove()
logger.add("logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="1 day")
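# rotation="00:00" above starts a fresh log file at midnight, and
# retention="1 day" deletes older files, so only the current day's log is kept.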


def after_log(retry_state):
    """
    Retry callback, passed to tenacity's `after=` hook.
    :param retry_state: a tenacity RetryCallState object
    """
    # Use the logger passed as the decorated function's first positional
    # argument if there is one; otherwise fall back to the global logger.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e
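
# Note: both the "http" and "https" keys above point at the same HTTP tunnel
# endpoint; requests routes both schemes through that one proxy. A common
# hardening step (sketch only, not wired in) is reading the hard-coded
# credentials from the environment instead:
#
#     import os
#     kdl_username = os.environ.get("KDL_USERNAME", "")
#     kdl_password = os.environ.get("KDL_PASSWORD", "")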


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys1(log):
    # Purchased account, North America
    # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
    # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
    http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
    https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
    # url = "https://ifconfig.me"
    try:
        proxySettings = {
            "http": http_proxy,
            "https": https_proxy,
        }
        return proxySettings
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e
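

# Proxy self-check (sketch, not called anywhere in this script): requests the
# echo service from the commented-out url above through the tunnel and logs
# the exit IP, a quick way to confirm the proxy is live.
def check_proxy(log):
    resp = requests.get("https://ifconfig.me", proxies=get_proxys1(log), timeout=5)
    log.info(f"Proxy exit IP: {resp.text.strip()}")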


def save_data(mysql_pool, info):
    """
    Insert one graded-card record.
    :param mysql_pool: MySQLConnectionPool instance
    :param info: 12-tuple matching the column list in the INSERT below
    :return: None
    """
    sql = "INSERT INTO beckett_bgs_record(set_name, player_name, date_graded, centering_grade, corner_grade, edges_grade, surfaces_grade, auto_grade, final_grade, total_grade, cards_grade, number) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    mysql_pool.insert_one(sql, info)
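
# Example call (commented): the values mirror the sample payload quoted in
# get_data's docstring below; pop_report/pop_higher are absent there, hence
# the two Nones, and `pool` stands for a MySQLConnectionPool instance.
# save_data(pool, ("2024 Magic the Gathering Secret Lair Dungeons & Dragons 50th Anniversary Bonus Card Foil",
#                  "Minsc & Boo, Timeless Heroes M", "Thursday, April 17, 2025",
#                  "9.5", "8.5", "8.5", "10.0", "0.0", "8.5", None, None, 17932864))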


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_data(log, bgs_id, mysql_pool):
    """
    Look up one BGS serial number and persist the result.
    :param log: logger instance
    :param bgs_id: BGS serial number to look up
    :param mysql_pool: MySQLConnectionPool instance
    :return: None
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://www.beckett.com/api/grading/lookup"
    params = {
        "category": "BGS",
        "serialNumber": str(bgs_id)
    }
    response = requests.get(url, headers=headers, params=params, proxies=get_proxys1(log), timeout=5)
    # response = requests.get(url, headers=headers, params=params, timeout=5)
    if response.status_code == 404:
        # No record found: set state to 3
        log.warning(f"No Record Found for {bgs_id}")
        mysql_pool.update_one("UPDATE bgs_task SET state=3 WHERE auth_code=%s", (bgs_id,))
        return
    if response.status_code != 200:
        # Lookup failed: set state to 2
        log.warning(f"Error getting data for {bgs_id}, {response.status_code}")
        mysql_pool.update_one("UPDATE bgs_task SET state=2 WHERE auth_code=%s", (bgs_id,))
        return
    # print(response.json())
    result_dict = response.json()
    if result_dict:
        """
        Sample response payload:
        "label": "silver",
        "non_bccg_card_total": 0,
        "item_id": "17932864",
        "set_name": "2024 Magic the Gathering Secret Lair Dungeons & Dragons 50th Anniversary Bonus Card Foil",
        "sport_name": "Magic",
        "card_key": "0879",
        "player_name": "Minsc & Boo, Timeless Heroes M",
        "date_graded": "Thursday, April 17, 2025",
        "center_grade": "9.5",
        "corners_grade": "8.5",
        "edges_grade": "8.5",
        "surfaces_grade": "10.0",
        "autograph_grade": "0.0",
        "final_grade": "8.5",
        """
        set_name = result_dict.get('set_name')
        player_name = result_dict.get('player_name')
        date_graded = result_dict.get('date_graded')
        centering_grade = result_dict.get('center_grade')
        corner_grade = result_dict.get('corners_grade')
        edges_grade = result_dict.get('edges_grade')
        surfaces_grade = result_dict.get('surfaces_grade')  # plural key, per the sample payload above
        auto_grade = result_dict.get('autograph_grade')
        final_grade = result_dict.get('final_grade')
        total_grade = result_dict.get('pop_report')
        cards_grade = result_dict.get('pop_higher')
        info = (set_name, player_name, date_graded, centering_grade, corner_grade, edges_grade, surfaces_grade,
                auto_grade, final_grade, total_grade, cards_grade, int(bgs_id))
        # Check whether every value except the trailing bgs_id is None or an empty string
        all_none_or_empty = all(x is None or x == '' for x in info[:-1])
        if all_none_or_empty:
            log.debug("All values are empty")
        else:
            # print(info)
            save_data(mysql_pool, info)
            # Lookup succeeded: set state to 1
            mysql_pool.update_one("UPDATE bgs_task SET state=1 WHERE auth_code=%s", (bgs_id,))
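

# DB-free smoke test (sketch): a throwaway stand-in that implements only the
# two pool methods get_data actually calls. _StubPool is hypothetical, not
# part of the production flow; the serial number is the one from the
# commented-out call in __main__ below.
class _StubPool:
    def insert_one(self, sql, params):
        logger.debug(f"insert_one: {params}")

    def update_one(self, sql, params):
        logger.debug(f"update_one: {params}")

# get_data(logger, "1000743", _StubPool())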


# def process_urls(log, ids, mysql_pool, batch_size=1000, max_workers=5):
#     with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
#         for i in range(0, len(ids), batch_size):
#             # print(i)
#             batch = ids[i:i + batch_size]
#             # print(batch)
#             try:
#                 futures_to_urls = {executor.submit(get_data, log, url, mysql_pool): url for url in batch}
#                 for future in concurrent.futures.as_completed(futures_to_urls):
#                     url = futures_to_urls[future]
#                     try:
#                         future.result()
#                         log.debug(f"Processed {url} successfully")
#                     except Exception as exc:
#                         log.debug(f"Error processing {url}: {exc}")
#             except Exception as e:
#                 log.error(f"Failed to submit tasks: {e}")
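# (Kept for reference: this threaded batch runner was replaced by the
# single-threaded loop in bgs_main, per the 2026/02/02 18:00 change note there.)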


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def bgs_main(log):
    try:
        log.info(
            "Starting the bgs_main spider task............................................................")
        sql_pool = MySQLConnectionPool(log=log)
        if not sql_pool:
            log.error("Database connection failed")
            raise Exception("Database connection failed")
        # max_bgs_id = sql_pool.select_one("SELECT MAX(number) AS max_number FROM beckett_bgs_record")
        # # print(max_bgs_id_list)
        # max_bgs_id = max_bgs_id[0]
        # log.info(f"max_bgs_id: starting the crawl from {max_bgs_id}.........................")
        # # bgs_id_list = [i for i in range(max_bgs_id, max_bgs_id + 3001)]
        """
        2026/01/20 change: range +3000 / -3000
        2026/02/02 change: range +20000 / -100000
        2026/02/02 18:00 change: IDs above 15,000,000, single-threaded, looping
        """
        # sql_bgs_id_list = sql_pool.select_all(
        #     "SELECT auth_code FROM bgs_task WHERE state != 1 AND auth_code BETWEEN %s AND %s LIMIT 120000",
        #     (max_bgs_id - 100000, max_bgs_id + 20000))
        while True:
            sql_bgs_id_list = sql_pool.select_all(
                "SELECT auth_code FROM bgs_task WHERE state!=1 AND auth_code > 15000000 LIMIT 5000")
            # "SELECT auth_code FROM bgs_task WHERE state!=1 AND auth_code > 17990000 LIMIT 5000"
            sql_bgs_id_list = [bid[0] for bid in sql_bgs_id_list]
            if not sql_bgs_id_list:
                log.info("No data to process; waiting 1 hour before retrying.........................")
                time.sleep(3600)
                continue
            for bid in sql_bgs_id_list:
                try:
                    # process_urls(log, sql_bgs_id_list, sql_pool, batch_size=1000,
                    #              max_workers=3)  # adjust batch_size and max_workers as needed
                    get_data(log, bid, sql_pool)
                except Exception as e:
                    log.error(f"process urls: {e}")
            # time.sleep(5)
    except Exception as e:
        log.error(e)
    finally:
        log.info("Spider run finished; waiting for the next round of collection.....................")


def schedule_task():
    """
    Set up the scheduled task.
    """
    # Run the task once immediately
    bgs_main(logger)
    # Then schedule it to run every Monday
    schedule.every().monday.at("03:01").do(bgs_main, logger)
    while True:
        schedule.run_pending()
        time.sleep(1)
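
# Note: schedule only fires due jobs from inside run_pending(), so the loop
# above must keep polling; the 1-second sleep keeps that polling cheap.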


if __name__ == '__main__':
    # schedule_task()
    # get_data('1000743')
    bgs_main(logger)