# bc_new_daily_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/6/11 19:00
import inspect
import time

import requests
import schedule
import user_agent
from loguru import logger
from mysql_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed
  13. """
  14. D+9位数,从D000000001-D000200000
  15. E+9位数,从E000000001-E000040000
  16. C+9位数,从C000000001-C000600000
  17. """

logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")
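
# Illustrative sketch, not part of the original script: one way to seed
# baocui_task with the keyword ranges from the module docstring. `seed_tasks`
# is a hypothetical helper and assumes baocui_task has (keyword, state)
# columns with state 0 meaning "pending".
def seed_tasks(sql_pool, prefix, start, end):
    """Insert keywords such as D000000001..D000200000 as pending tasks."""
    for num in range(start, end + 1):
        keyword = f"{prefix}{num:09d}"  # letter prefix + 9 zero-padded digits
        sql_pool.insert_one("insert into baocui_task (keyword, state) values (%s, 0)", (keyword,))
# Example: seed_tasks(sql_pool, "E", 1, 40000) would enqueue the E range.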


def after_log(retry_state):
    """
    tenacity retry callback, run after every attempt.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the first positional argument, if any;
    # otherwise fall back to the module-level logger.
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxies dict for the tunnel proxy.
    :param log: logger object
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxy_url = f"http://{kdl_username}:{kdl_password}@{tunnel}/"
        proxies = {
            "http": proxy_url,
            "https": proxy_url
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e


def save_data(sql_pool, info):
    """
    Save one record.
    :param sql_pool: MySQL connection pool object
    :param info: tuple of values to insert
    """
    sql = """
        INSERT INTO baocui_record (keyword, picurl, dt_pics, vr_pics, vr_date, title, name, year, score, card_type, card_num, card_score, card_centering, card_edges, card_corners, card_surface)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    sql_pool.insert_one(sql, info)
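
# A sketch of the baocui_record table that the INSERT above targets. The
# actual DDL is not part of this file, so the column types below are
# assumptions for illustration only:
#
# CREATE TABLE baocui_record (
#     id INT AUTO_INCREMENT PRIMARY KEY,
#     keyword VARCHAR(16), picurl TEXT, dt_pics TEXT, vr_pics TEXT,
#     vr_date VARCHAR(32), title VARCHAR(255), name VARCHAR(255),
#     year VARCHAR(16), score VARCHAR(16), card_type VARCHAR(64),
#     card_num VARCHAR(64), card_score VARCHAR(16), card_centering VARCHAR(16),
#     card_edges VARCHAR(16), card_corners VARCHAR(16), card_surface VARCHAR(16)
# );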


def parse_data(resp_json: dict, keyword, sql_pool):
    """
    Parse the response payload and persist it.
    :param resp_json: return value of response.json() -> dict
    :param keyword: the keyword that was queried
    :param sql_pool: MySQL connection pool object
    """
    info_dict = resp_json.get("info")
    if info_dict:
        picurl = info_dict.get("picurl")    # front image
        dt_pics = info_dict.get("dt_pics")  # back image
        vr_pics = info_dict.get("vr_pics")  # VR image
        vr_date = info_dict.get("vr_date")
        title = info_dict.get("details", {}).get("title")
        details_grade = info_dict.get("details", {}).get("grade", [])
        # Map the grade titles returned by the API onto the record fields.
        grade_field_map = {
            "名称": "name",        # name
            "年份": "year",        # year
            "分数": "score",       # score
            "卡种": "card_type",   # card type
            "编号": "card_num",    # card number
            "分值": "card_score",  # card score
            "CENTERING": "card_centering",
            "EDGES": "card_edges",
            "CORNERS": "card_corners",
            "SURFACE": "card_surface",
        }
        grade_fields = dict.fromkeys(grade_field_map.values())
        for grade in details_grade:
            field = grade_field_map.get(grade.get("title"))
            if field:
                grade_fields[field] = grade.get("val")
        info = (keyword, picurl, dt_pics, vr_pics, vr_date, title, grade_fields['name'], grade_fields['year'],
                grade_fields['score'], grade_fields['card_type'], grade_fields['card_num'],
                grade_fields['card_score'], grade_fields['card_centering'], grade_fields['card_edges'],
                grade_fields['card_corners'], grade_fields['card_surface'])
        save_data(sql_pool, info)
    sql_pool.update_one("update baocui_task set state = 1 where keyword = %s", (keyword,))


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_bc_resp(log, keyword, sql_pool):
    """
    Fetch the response for one keyword and dispatch it by result code.
    :param log: logger object
    :param keyword: the keyword to query
    :param sql_pool: MySQL connection pool object
    """
    headers = {
        "accept": "application/json",
        "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
        # "referer": "https://www.baocuicoin.com/",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://web-api.baocuicoin.com/Search/index"
    params = {
        "keyword": keyword,
        # "code": "404"
        "code": ""
    }
    with requests.Session() as session:
        response = session.post(url, headers=headers, params=params, proxies=get_proxys(log), timeout=10)
        response.raise_for_status()
        resp_json = response.json()
        if not resp_json:
            log.error(f"Empty response body, fetch for {keyword} failed")
            return
        if resp_json.get("code") == '200':
            parse_data(resp_json, keyword, sql_pool)
        elif resp_json.get("code") == '0':
            msg = resp_json.get('msg') or ''
            if '编号未收录' in msg:  # number not catalogued
                log.error(f"code == 0, fetch for {keyword} failed, msg: {msg}")
                sql_pool.update_one("update baocui_task set state = 2 where keyword = %s", (keyword,))
            elif 'IP已被封禁' in msg:  # IP banned
                log.error(f"code == 0, fetch for {keyword} failed, msg: {msg}")
                raise Exception("IP banned, retrying the request")
            else:
                log.error(f"code == 0, fetch for {keyword} failed, msg: {msg}")
                sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))
        else:
            log.error(f"code != 0 or 200, fetch for {keyword} failed, msg: {resp_json.get('msg')}")
            sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def baocui_main(log):
    """
    Main entry point: drains pending keywords from baocui_task. If it raises
    (e.g. the DB connection fails), tenacity retries hourly, up to 100 attempts.
    :param log: logger object
    """
    log.info(
        f'Starting spider task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("MySQL database connection failed")
        raise Exception("MySQL database connection failed")
    try:
        while True:
            # keyword = 'E000011631'
            sql_code_list = sql_pool.select_all("select keyword from baocui_task where state = 0 limit 10000")
            sql_code_list = [i[0] for i in sql_code_list]
            # if not sql_code_list:
            #     log.debug(
            #         f"{inspect.currentframe().f_code.co_name} -> no pending keywords, wait 1 hour and query again.........")
            #     time.sleep(3600)
            #     sql_code_list = sql_pool.select_all("select keyword from baocui_task where state = 0 limit 10000")
            #     sql_code_list = [i[0] for i in sql_code_list]
            if not sql_code_list:
                log.debug(f"{inspect.currentframe().f_code.co_name} -> no pending keywords left")
                break
            for keyword in sql_code_list:
                try:
                    get_bc_resp(log, keyword, sql_pool)
                except Exception as e:
                    log.error(f"Loop keyword {keyword} fetch failed, error: {e}")
                    sql_pool.update_one("update baocui_task set state = 3 where keyword = %s", (keyword,))
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next round of tasks............')


def schedule_task():
    """
    Launcher for the spider module.
    """
    # Run the task once immediately
    baocui_main(log=logger)
    # Then schedule it to run every 30 days at 00:01
    schedule.every(30).days.at("00:01").do(baocui_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    schedule_task()