bc_new_daily_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/6/11 19:00
import inspect
import random
import time

import requests
import schedule
import user_agent
from loguru import logger
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool

"""
Keyword ranges to crawl:
D + 9 digits, from D000000001 to D000200000
E + 9 digits, from E000000001 to E000040000
C + 9 digits, from C000000001 to C000600000
"""

logger.remove()
# Log to a daily-rotating file (rotated at midnight, kept for 7 days)
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")


def after_log(retry_state):
    """
    Retry callback for tenacity.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the decorated function's first positional
    # argument, if any; otherwise fall back to the global logger
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxies dict for the KDL tunnel proxy.
    :param log: logger object
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e


def save_data(sql_pool, info):
    """
    Save one record into baocui_record.
    :param sql_pool: MySQL connection pool object
    :param info: tuple of values to insert
    """
    sql = """
    INSERT INTO baocui_record (keyword, picurl, dt_pics, vr_pics, vr_date, title, name, year, score, card_type, card_num, card_score, card_centering, card_edges, card_corners, card_surface)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    sql_pool.insert_one(sql, info)
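
# For reference, a plausible DDL for baocui_record, inferred from the INSERT
# above; the column types and lengths here are assumptions, not the live schema:
#
#   CREATE TABLE IF NOT EXISTS baocui_record (
#       id             INT AUTO_INCREMENT PRIMARY KEY,
#       keyword        VARCHAR(32),
#       picurl         TEXT,
#       dt_pics        TEXT,
#       vr_pics        TEXT,
#       vr_date        VARCHAR(64),
#       title          VARCHAR(255),
#       name           VARCHAR(255),
#       year           VARCHAR(32),
#       score          VARCHAR(32),
#       card_type      VARCHAR(64),
#       card_num       VARCHAR(64),
#       card_score     VARCHAR(32),
#       card_centering VARCHAR(32),
#       card_edges     VARCHAR(32),
#       card_corners   VARCHAR(32),
#       card_surface   VARCHAR(32)
#   ) DEFAULT CHARSET = utf8mb4;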


def parse_data(resp_json: dict, keyword, sql_pool):
    """
    Parse a response and persist the record.
    :param resp_json: return value of response.json() -> dict
    :param keyword: the keyword that was queried
    :param sql_pool: MySQL connection pool object
    """
    info_dict = resp_json.get("info")
    if info_dict:
        picurl = info_dict.get("picurl")    # front image
        dt_pics = info_dict.get("dt_pics")  # back image
        vr_pics = info_dict.get("vr_pics")  # VR image
        vr_date = info_dict.get("vr_date")
        title = info_dict.get("details", {}).get("title")
        details_grade = info_dict.get("details", {}).get("grade", [])
        # Map the grade titles returned by the API onto record fields
        field_map = {
            "名称": "name",        # name
            "年份": "year",        # year
            "分数": "score",       # score
            "卡种": "card_type",   # card type
            "编号": "card_num",    # card number
            "分值": "card_score",  # card score
            "CENTERING": "card_centering",
            "EDGES": "card_edges",
            "CORNERS": "card_corners",
            "SURFACE": "card_surface",
        }
        grade_dict = dict.fromkeys(field_map.values())
        for grade in details_grade:
            field = field_map.get(grade.get("title"))
            if field:
                grade_dict[field] = grade.get("val")
        info = (keyword, picurl, dt_pics, vr_pics, vr_date, title, grade_dict['name'], grade_dict['year'],
                grade_dict['score'], grade_dict['card_type'], grade_dict['card_num'],
                grade_dict['card_score'], grade_dict['card_centering'], grade_dict['card_edges'],
                grade_dict['card_corners'], grade_dict['card_surface'])
        save_data(sql_pool, info)
    # Mark the task as done (state = 1)
    sql_pool.update_one("update baocui_task set state = 1 where keyword = %s", (keyword,))
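
# Illustrative shape of a code == "200" response, inferred from the parsing
# above; the values are made-up placeholders, not real API data:
#
#   {
#       "code": "200",
#       "msg": "success",
#       "info": {
#           "picurl": "<front image url>",
#           "dt_pics": "<back image url>",
#           "vr_pics": "<VR image url>",
#           "vr_date": "<date>",
#           "details": {
#               "title": "<card title>",
#               "grade": [{"title": "名称", "val": "<name>"},
#                         {"title": "分数", "val": "<score>"}]
#           }
#       }
#   }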


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_bc_resp(log, keyword, sql_pool):
    """
    Fetch one keyword and dispatch on the response code.
    :param log: logger object
    :param keyword: the keyword to query
    :param sql_pool: MySQL connection pool object
    """
    headers = {
        "accept": "application/json",
        "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
        # "referer": "https://www.baocuicoin.com/",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://web-api.baocuicoin.com/Search/index"
    params = {
        "keyword": keyword,
        "code": ""
    }
    with requests.Session() as session:
        response = session.post(url, headers=headers, params=params, proxies=get_proxys(log), timeout=10)
        response.raise_for_status()
        resp_json = response.json()
    if not resp_json:
        log.error(f"Empty resp_json, fetch failed for {keyword}")
        return
    if resp_json.get("code") == '200':
        parse_data(resp_json, keyword, sql_pool)
    elif resp_json.get("code") == '0':
        msg = resp_json.get('msg')
        if '编号未收录' in msg:  # "number not in the database"
            log.error(f"code == 0, fetch failed for {keyword}, msg: {msg}")
            sql_pool.update_one("update baocui_task set state = 2 where keyword = %s", (keyword,))
        elif 'IP已被封禁' in msg:  # "IP has been banned"
            log.error(f"code == 0, fetch failed for {keyword}, msg: {msg}")
            raise Exception("IP banned, retrying the request")
        else:
            log.error(f"code == 0, fetch failed for {keyword}, msg: {msg}")
            sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))
    else:
        log.error(f"code != 0 or 200, fetch failed for {keyword}, msg: {resp_json.get('msg')}")
        sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))
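
# Task states written to baocui_task by the functions above:
#   0 - pending (picked up by baocui_main)
#   1 - fetched and saved successfully
#   2 - number not in Baocui's database ("编号未收录")
#   3 - failed for any other reason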


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def baocui_main(log):
    """
    Main entry point: drains pending keywords from baocui_task.
    :param log: logger object
    """
    log.info(
        f'Starting the {inspect.currentframe().f_code.co_name} spider task....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("Failed to connect to the MySQL database")
        raise Exception("Failed to connect to the MySQL database")
    try:
        while True:
            # Fetch the next batch of pending keywords (state = 0)
            sql_code_list = sql_pool.select_all("select keyword from baocui_task where state = 0 limit 10000")
            sql_code_list = [i[0] for i in sql_code_list]
            if not sql_code_list:
                log.debug(f"{inspect.currentframe().f_code.co_name} -> no pending keywords left")
                break
            for keyword in sql_code_list:
                try:
                    get_bc_resp(log, keyword, sql_pool)
                except Exception as e:
                    log.error(f"Loop keyword: {keyword} fetch failed, error: {e}")
                    sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))
                time.sleep(random.uniform(0.5, 1))
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next round of collection............')


def schedule_task():
    """
    Startup entry for the spider module.
    """
    # Run the task once immediately
    baocui_main(log=logger)
    # Then schedule it to run every 30 days at 00:01
    schedule.every(30).days.at("00:01").do(baocui_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    schedule_task()