zj_new_daily_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/6/9 15:56
import time
import inspect

import requests
import schedule
import user_agent
from loguru import logger
from tenacity import retry, stop_after_attempt, wait_fixed

from mysq_pool import MySQLConnectionPool
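
# MySQLConnectionPool comes from a local helper module not shown here; from
# the call sites below it is assumed to expose roughly this interface:
#   select_all(sql) -> list[tuple]      # fetch all rows
#   insert_one(sql, params) -> None     # parameterized INSERT
#   update_one(sql, params) -> None     # parameterized UPDATE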

logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding="utf-8", rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")


def after_log(retry_state):
    """
    Retry callback.
    :param retry_state: a tenacity RetryCallState object
    """
    # Use the logger passed as the wrapped function's first positional
    # argument, falling back to the global logger.
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
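
# Note: with tenacity's default retry predicate, the `after` callback fires
# after a failed attempt, before the next retry is scheduled, so the success
# branch above is rarely reached in practice.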


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxy mapping for requests.
    :return: proxies dict
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": f"http://{kdl_username}:{kdl_password}@{tunnel}/",
            "https": f"http://{kdl_username}:{kdl_password}@{tunnel}/",
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e
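
# Both plain and TLS traffic are routed through the same KDL HTTP tunnel,
# e.g. requests.post(url, proxies=get_proxys(logger), ...).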


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_request_one_page(log, rating_no) -> dict:
    headers = {
        "accept": "*/*",
        "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
        "content-type": "application/json;charset=UTF-8",
        "origin": "https://www.zhongjianjiantong.com",
        "priority": "u=1, i",
        "referer": "https://www.zhongjianjiantong.com/web/index.html",
        "sec-ch-ua": "\"Not(A:Brand\";v=\"99\", \"Google Chrome\";v=\"133\", \"Chromium\";v=\"133\"",
        "sec-ch-ua-mobile": "?1",
        "sec-ch-ua-platform": "\"Android\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://www.zhongjianjiantong.com/Api/OrderRatingGoods/detail"
    data = {
        "rating_no": rating_no
    }
    with requests.Session() as session:
        response = session.post(url, headers=headers, json=data, proxies=get_proxys(log), timeout=5)
        response.raise_for_status()
        return response.json()
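
# The response envelope, inferred from the parsing below, looks roughly like:
#   {"code": 200, "data": {"obj_order_rating_goods": {"id": ..., "obj_detail": {...}, ...}}}
# where code 400 appears to mean the certificate number does not exist.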


def parse_data(resp_json, sql_pool):
    goods = resp_json.get('data', {}).get('obj_order_rating_goods', {}) or {}
    detail = goods.get('obj_detail', {}) or {}
    card_id = goods.get('id')
    order_no = goods.get('order_no')
    tag_no = goods.get('tag_no')  # tag number (the queried number)
    images = goods.get('images')
    card_create_time = goods.get('create_time')
    card_update_time = goods.get('update_time')
    score = goods.get('score')  # Zhongjian grading score
    corners = detail.get('corners')  # corners
    eoges = detail.get('eoges')  # edges
    surface = detail.get('surface')  # surface
    centering = detail.get('centering')  # centering
    colour = detail.get('colour')  # colour
    repair = detail.get('repair')  # restoration
    rating_no = goods.get('rating_no')  # certificate number
    obj_brand_title = goods.get('obj_brand', {}).get('title')  # product brand
    obj_detail_spxl = detail.get('spxl')  # product series
    obj_detail_spmc = detail.get('spmc')  # product name
    obj_detail_fxnf = detail.get('fxnf')  # release year
    obj_detail_yy = detail.get('yy')  # language
    obj_detail_spbh = detail.get('spbh')  # product code
    info = (
        card_id, order_no, tag_no, images, card_create_time, card_update_time, score, corners, eoges,
        surface, centering, colour, repair, rating_no, obj_brand_title, obj_detail_spxl, obj_detail_spmc,
        obj_detail_fxnf, obj_detail_yy, obj_detail_spbh)
    sql = """
        INSERT INTO zhongjian_record (card_id, order_no, tag_no, images, card_create_time, card_update_time, score, corners, eoges, surface, centering, colour, repair, rating_no, obj_brand_title, obj_detail_spxl, obj_detail_spmc, obj_detail_fxnf, obj_detail_yy, obj_detail_spbh)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    sql_pool.insert_one(sql, info)
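
# The %s placeholders are filled in by the MySQL driver, which escapes each
# value, so fields scraped from the API are never interpolated into the SQL
# string directly.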


def loop_rating_no(log, sql_pool, sql_ra_no_list):
    for rating_no_ in sql_ra_no_list:
        log.info(f"{rating_no_} is starting ......................................")
        try:
            resp_json = get_request_one_page(log, rating_no_)
            if resp_json and resp_json.get('code') == 200:
                parse_data(resp_json, sql_pool)
                sql_pool.update_one('update zhongjian_task set state = 1 where tag_no = %s', (rating_no_,))
            elif resp_json and resp_json.get('code') == 400:
                log.warning(f"{rating_no_} does not exist ......................................")
                sql_pool.update_one('update zhongjian_task set state = 2 where tag_no = %s', (rating_no_,))
            else:
                log.warning("unexpected response, please check ......................................")
                sql_pool.update_one('update zhongjian_task set state = 3 where tag_no = %s', (rating_no_,))
        except Exception as e:
            log.warning(f"{inspect.currentframe().f_code.co_name} error: {e}")
            sql_pool.update_one('update zhongjian_task set state = 3 where tag_no = %s', (rating_no_,))
            continue
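
# Task states written above: 0 = pending (the only state zhongjian_main
# selects), 1 = scraped successfully, 2 = certificate not found (API code
# 400), 3 = unexpected response or error.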


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def zhongjian_main(log):
    """
    Main entry point: drain the pending task queue in batches.
    :param log: logger instance
    """
    log.info(
        f'Starting spider task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool.
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("Failed to connect to the MySQL database")
        raise Exception("Failed to connect to the MySQL database")
    try:
        while True:
            sql_ra_no_list = sql_pool.select_all('select tag_no from zhongjian_task where state = 0 limit 10000')
            sql_ra_no_list = [i[0] for i in sql_ra_no_list]
            if not sql_ra_no_list:
                log.info('No data left to process; waiting for the next round........................................................')
                break
            try:
                loop_rating_no(log, sql_pool, sql_ra_no_list)
            except Exception as e:
                log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')
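
# The retry decorator above turns a crashed run into an hourly re-attempt
# (wait_fixed(3600), up to 100 tries); a normal run drains the queue, breaks
# out of the loop, and returns.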


def schedule_task():
    """
    Entry point for the spider module.
    """
    # Run the task once immediately.
    zhongjian_main(log=logger)
    # Then schedule it to run every 30 days at 00:01.
    schedule.every(30).days.at("00:01").do(zhongjian_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    schedule_task()