# bc_spider.py
  1. # -*- coding: utf-8 -*-
  2. # Author : Charley
  3. # Python : 3.10.8
  4. # Date : 2025/2/27 11:57
  5. import inspect
  6. import requests
  7. import user_agent
  8. from loguru import logger
  9. from mysq_pool import MySQLConnectionPool
  10. from tenacity import retry, stop_after_attempt, wait_fixed
  11. """
  12. D+9位数,从D000000001-D000200000
  13. E+9位数,从E000000001-E000040000
  14. C+9位数,从C000000001-C000600000
  15. """
# Configure loguru: drop the default stderr sink and write to a daily file.
logger.remove()
# One log file per day under ./logs, rotated at midnight, kept for a week.
# NOTE(review): loguru duration strings are normally plural ("7 days") —
# confirm "7 day" parses as intended.
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 day")
  20. def after_log(retry_state):
  21. """
  22. retry 回调
  23. :param retry_state: RetryCallState 对象
  24. """
  25. # 检查 args 是否存在且不为空
  26. if retry_state.args and len(retry_state.args) > 0:
  27. log = retry_state.args[0] # 获取传入的 logger
  28. else:
  29. log = logger # 使用全局 logger
  30. if retry_state.outcome.failed:
  31. log.warning(
  32. f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
  33. else:
  34. log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
  35. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  36. def get_proxys(log):
  37. """
  38. 获取代理
  39. :return: 代理
  40. """
  41. tunnel = "x371.kdltps.com:15818"
  42. kdl_username = "t13753103189895"
  43. kdl_password = "o0yefv6z"
  44. try:
  45. proxies = {
  46. "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
  47. "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
  48. }
  49. return proxies
  50. except Exception as e:
  51. log.error(f"Error getting proxy: {e}")
  52. raise e
  53. def save_data(sql_pool, info):
  54. """
  55. 保存数据
  56. :param sql_pool: MySQL连接池对象
  57. :param info: 要保存的数据
  58. """
  59. sql = """
  60. INSERT INTO baocui_record (keyword, picurl, dt_pics, vr_pics, vr_date, title, name, year, score, card_type, card_num, card_score, card_centering, card_edges, card_corners, card_surface)
  61. VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
  62. """
  63. sql_pool.insert_one(sql, info)
  64. def parse_data(resp_json: dict, keyword, sql_pool):
  65. """
  66. 解析数据
  67. :param resp_json: response.json()的返回值 -> dict
  68. :param keyword: 查询的keyword
  69. :param sql_pool: MySQL连接池对象
  70. """
  71. info_dict = resp_json.get("info")
  72. if info_dict:
  73. picurl = info_dict.get("picurl") # 正面图
  74. dt_pics = info_dict.get("dt_pics") # 反面图
  75. vr_pics = info_dict.get("vr_pics") # VR图
  76. vr_date = info_dict.get("vr_date")
  77. title = info_dict.get("details", {}).get("title")
  78. details_grade = info_dict.get("details", {}).get("grade", [])
  79. info_dict = {
  80. 'name': None,
  81. 'year': None,
  82. 'score': None,
  83. 'card_type': None,
  84. 'card_num': None,
  85. 'card_score': None,
  86. 'card_centering': None,
  87. 'card_edges': None,
  88. 'card_corners': None,
  89. 'card_surface': None
  90. }
  91. for grade in details_grade:
  92. grade_name = grade.get("title")
  93. grade_value = grade.get("val")
  94. # print(f"{grade_name}:{grade_value}")
  95. if grade_name == "名称":
  96. info_dict['name'] = grade_value
  97. elif grade_name == "年份":
  98. info_dict['year'] = grade_value
  99. elif grade_name == "分数":
  100. info_dict['score'] = grade_value
  101. elif grade_name == "卡种":
  102. info_dict['card_type'] = grade_value
  103. elif grade_name == "编号":
  104. info_dict['card_num'] = grade_value
  105. elif grade_name == "分值":
  106. info_dict['card_score'] = grade_value
  107. elif grade_name == "CENTERING":
  108. info_dict['card_centering'] = grade_value
  109. elif grade_name == "EDGES":
  110. info_dict['card_edges'] = grade_value
  111. elif grade_name == "CORNERS":
  112. info_dict['card_corners'] = grade_value
  113. elif grade_name == "SURFACE":
  114. info_dict['card_surface'] = grade_value
  115. info = (keyword, picurl, dt_pics, vr_pics, vr_date, title, info_dict.get('name'), info_dict.get('year'),
  116. info_dict.get('score'), info_dict.get('card_type'), info_dict.get('card_num'),
  117. info_dict.get('card_score'), info_dict.get('card_centering'), info_dict.get('card_edges'),
  118. info_dict.get('card_corners'), info_dict.get('card_surface'))
  119. # print(info)
  120. save_data(sql_pool, info)
  121. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  122. def get_bc_resp(log, keyword, sql_pool):
  123. """
  124. 获取响应
  125. :param log: logger对象
  126. :param keyword: 查询的keyword
  127. :param sql_pool: MySQL连接池对象
  128. """
  129. headers = {
  130. "accept": "application/json",
  131. "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
  132. # "referer": "https://www.baocuicoin.com/",
  133. "user-agent": user_agent.generate_user_agent()
  134. }
  135. url = "https://web-api.baocuicoin.com/Search/index"
  136. params = {
  137. "keyword": keyword,
  138. # "code": "404"
  139. "code": ""
  140. }
  141. try:
  142. with requests.Session() as session:
  143. response = session.post(url, headers=headers, params=params, proxies=get_proxys(log), timeout=10)
  144. # print(response.text)
  145. # print(response.json())
  146. response.raise_for_status()
  147. resp_json = response.json()
  148. if not resp_json:
  149. log.error(f"Not resp_json, {keyword} 获取失败, msg:{resp_json.get('msg')}")
  150. return
  151. if resp_json.get("code") == '200':
  152. parse_data(resp_json, keyword, sql_pool)
  153. else:
  154. log.error(f"code != 200, {keyword} 获取失败, msg:{resp_json.get('msg')}")
  155. except Exception as e:
  156. log.warning(f"{inspect.currentframe().f_code.co_name} error: {e}")
  157. def generate_keywords():
  158. """
  159. 生成要查询的 keywords
  160. """
  161. start = 1
  162. # for prefix, end in [('C', 600001), ('D', 200001), ('E', 40001)]:
  163. for prefix, end in [('D', 200001), ('E', 40001)]:
  164. if prefix == 'D':
  165. start = 150503
  166. for i in range(start, end):
  167. yield f"{prefix}{i:09d}"
  168. @retry(stop=stop_after_attempt(50), wait=wait_fixed(1800), after=after_log)
  169. def baocui_main(log):
  170. """
  171. 主函数
  172. :param log: logger对象
  173. """
  174. log.info(
  175. f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
  176. # 配置 MySQL 连接池
  177. sql_pool = MySQLConnectionPool(log=log)
  178. if not sql_pool:
  179. log.error("MySQL数据库连接失败")
  180. raise Exception("MySQL数据库连接失败")
  181. try:
  182. # keyword = 'E000011631'
  183. for keyword in generate_keywords():
  184. try:
  185. get_bc_resp(log, keyword, sql_pool)
  186. except Exception as e:
  187. log.error(f"Loop keyword:{keyword} 获取失败, error:{e}")
  188. except Exception as e:
  189. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  190. finally:
  191. log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
if __name__ == '__main__':
    # Run the crawler with the module-level loguru logger.
    baocui_main(logger)