urbox_id_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/8/4 16:08
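"""
Spider for the urboxwin.com "breaking" product pages.

What the functions below do:
  * get_product_detail()      - fetch one product's detail and insert it into urbox_product_record
  * get_player_list() /
    get_player_single_page()  - page through a product's draw report, store the entries in
                                urbox_player_record, then mark the product state = 1
  * urbox_id_main()           - probe goods_ids that are missing from the database, then collect
                                player lists for products whose state is 0
  * schedule_task()           - run urbox_id_main() once immediately, then daily at 00:01
"""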

import inspect
import random
import time

import requests
import schedule
from loguru import logger
from parsel import Selector
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool
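
# NOTE: mysql_pool is a project-local module that is not shown here. Judging only from how it is
# used in this file, MySQLConnectionPool is assumed to expose check_pool_health(), select_one(),
# select_all(), insert_one_or_dict(), insert_many() and update_one_or_dict().
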
# logger.remove()
# logger.add("./logs/id_{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
#            format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
#            level="DEBUG", retention="7 day")


def after_log(retry_state):
    """
    Retry callback for tenacity. The decorated functions below take a logger as
    their first positional argument, so that logger is reused when available.

    :param retry_state: RetryCallState object
    """
    # Check whether args exists and is not empty
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]  # use the logger that was passed in
    else:
        log = logger  # fall back to the global logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_product_detail(log, product_id, sql_pool):
    """Fetch the detail of one product and insert it into urbox_product_record."""
    log.debug(f'--------------- {inspect.currentframe().f_code.co_name}, product_id {product_id} start ---------------')
    headers = {
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "en,zh-CN;q=0.9,zh;q=0.8",
        # "Authorization": "Bearer 4001|rZwBadHCeDlJTRK52IFVpvcuay2hQjYFDLMO72xo",
        "Connection": "keep-alive",
        "Content-Type": "application/json",
        "Referer": "https://www.urboxwin.com/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
    }
    url = "https://jp.urboxwin.com/webapi/breaking/detail"
    data = {
        # "id": "353"
        "id": f"{product_id}"
    }
    response = requests.post(url, headers=headers, json=data)
    log.debug(response.json())
    log.debug(response.status_code)
    response.raise_for_status()
    resp_json = response.json()
    if resp_json.get("error"):
        log.debug(f"product_id:{product_id}, message:{resp_json.get('error', {}).get('message', '')}")
        return
    json_data = resp_json.get("data", {})
    sale_status = json_data.get("sale_status")
    card_number = json_data.get("number")
    tags = json_data.get("tags", [])
    tags_name_list = [tag.get("name") for tag in tags] if tags else []
    tags_name = " ".join(tags_name_list) if tags_name_list else ""
    title = json_data.get("title")
    score = json_data.get("score")
    available_at = json_data.get("available_at")
    end_at = json_data.get("end_at")
    buy_count = json_data.get("buy_count")  # str
    total_stock = json_data.get("total_stock")  # total stock
    available_stock = json_data.get("available_stock")  # available stock
    # Strip HTML tags from the detail field with parsel
    try:
        detail = json_data.get("detail")
        if not detail:
            clean_detail = ""
        else:
            # Make sure we are working with a string
            if isinstance(detail, bytes):
                detail = detail.decode('utf-8')
            selector = Selector(text=detail)
            text_nodes = selector.xpath('//text()').getall()
            clean_detail = ''.join(text_nodes).strip()
    except UnicodeDecodeError as e:
        log.error(f"Encoding error in the detail field of product {product_id}: {e}")
        clean_detail = ""
    except Exception as e:
        log.error(f"Failed to parse the detail field of product {product_id}: {e}")
        clean_detail = ""
    cover_image = json_data.get("cover_image", {}).get('thumb_o_file')  # listing-page image
    top_images = json_data.get("top_images", [])
    top_images = [item.get('thumb_o_file') for item in top_images]
    top_images = '|'.join(top_images)  # multiple images joined with "|"
    detail_images = json_data.get("detail_images", [])
    detail_images = [item.get('thumb_o_file') for item in detail_images]
    detail_images = '|'.join(detail_images)  # detail-page images
    current_at = json_data.get("current_at")
    live_time = json_data.get("live_time")
    stock_percentage = json_data.get("stock_percentage")  # stock percentage
    live_link = json_data.get("live_link")
    buy_mode = json_data.get("buy_mode")  # purchase mode
    data_dict = {
        "goods_id": product_id,
        "sale_status": sale_status,
        "card_number": card_number,
        "tags_name": tags_name,
        "title": title,
        "score": score,
        "available_at": available_at,
        "end_at": end_at,
        "buy_count": buy_count,
        "total_stock": total_stock,
        "available_stock": available_stock,
        "detail_str": clean_detail,
        "cover_image": cover_image,
        "top_images": top_images,
        "detail_images": detail_images,
        "current_at": current_at,
        "live_time": live_time,
        "stock_percentage": stock_percentage,
        "live_link": live_link,
        "buy_mode": buy_mode
    }
    # To update an existing row by goods_id instead:
    # sql_pool.update_one_or_dict(table="urbox_product_record", data=data_dict, condition={"goods_id": product_id})
    sql_pool.insert_one_or_dict(table="urbox_product_record", data=data_dict)


def get_player_list(log, goods_id, sql_pool):
    """Page through the draw report of a product and store every entry."""
    page = 1
    while page <= 500:
        try:
            log.debug(
                f'--------------- {inspect.currentframe().f_code.co_name}, page {page}, goods_id {goods_id} start ---------------')
            len_items = get_player_single_page(log, goods_id, sql_pool, page)
        except Exception as e:
            log.error(
                f"{inspect.currentframe().f_code.co_name} Request get_player_single_page for page:{page}, {e}")
            len_items = 0
        if len_items < 20:
            log.debug(f'--------------- page {page} has {len_items} items, break ---------------')
            break
        page += 1
        # Pause briefly so we do not query too frequently
        time.sleep(random.uniform(0.5, 1))
    # Mark the product as processed
    sql_pool.update_one_or_dict(table="urbox_product_record", data={"state": 1}, condition={"goods_id": goods_id})


def get_player_single_page(log, goods_id, sql_pool, page):
    """Fetch one page of the draw report and insert it into urbox_player_record.

    Returns the number of items on the page so the caller can stop when a page
    comes back with fewer than per_page (20) entries.
    """
    headers = {
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "en,zh-CN;q=0.9,zh;q=0.8",
        "Authorization": "Bearer 4001|rZwBadHCeDlJTRK52IFVpvcuay2hQjYFDLMO72xo",
        "Connection": "keep-alive",
        "Content-Type": "application/json",
        "Referer": "https://www.urboxwin.com/",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
    }
    url = "https://jp.urboxwin.com/webapi/breaking/report"
    data = {
        # "page": 1,
        "page": page,
        "per_page": 20,
        # "id": "353"
        "id": f"{goods_id}"
    }
    response = requests.post(url, headers=headers, json=data)
    response.raise_for_status()
    resp_json = response.json()
    json_list = resp_json.get("data", {}).get("data", [])
    if not json_list:
        log.debug("no data")
        return 0
    info_list = []
    for item in json_list:
        data_id = item.get("id")
        user_name = item.get("user", {}).get("name")
        title = item.get("title")
        is_win_prize = item.get("is_win_prize")  # whether the entry won a prize
        win_prize_text = item.get("win_prize_text")  # prize description
        draw_finish_at = item.get("draw_finish_at")  # draw finish time
        prize_image = item.get("prize_image", {})  # prize image
        prize_image = prize_image.get('thumb_o_file') if prize_image else ""
        data_dict = {
            "goods_id": goods_id,
            "data_id": data_id,
            "user_name": user_name,
            "title": title,
            "is_win_prize": is_win_prize,
            "win_prize_text": win_prize_text,
            "draw_finish_at": draw_finish_at,
            "prize_image": prize_image
        }
        info_list.append(data_dict)
    if info_list:
        try:
            sql_pool.insert_many(table="urbox_player_record", data_list=info_list, ignore=True)
        except Exception as e:
            log.error(f"Failed to insert player_record rows for product {goods_id}: {e}")
    return len(json_list)
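

# For reference: judging only from the fields consumed in get_player_single_page() above, the
# /webapi/breaking/report response is assumed to be shaped roughly like
#   {"data": {"data": [{"id": ..., "user": {"name": ...}, "title": ..., "is_win_prize": ...,
#                       "win_prize_text": ..., "draw_finish_at": ...,
#                       "prize_image": {"thumb_o_file": ...}}, ...]}}
# The live API may include additional fields that are simply ignored here.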


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def urbox_id_main(log):
    """
    Main entry point.
    :param log: logger instance
    """
    log.info(
        f'Starting spider task {inspect.currentframe().f_code.co_name}.................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        # Fetch product detail data
        log.debug(f'Request start for product detail.........')
        # Find the largest goods_id already stored in the database
        sql_max = "SELECT MAX(goods_id) AS max_goods_id FROM urbox_product_record"
        max_goods_id = sql_pool.select_one(sql_max)
        max_goods_id = max_goods_id[0]
        log.debug(f'max_goods_id: {max_goods_id}')
        # Collect the goods_ids (up to max + 50) that are not in the database yet
        sql_goods_id_list = sql_pool.select_all("select goods_id from urbox_product_record")
        sql_goods_id_list = [item[0] for item in sql_goods_id_list]
        goods_id_list = []
        for i in range(1, max_goods_id + 50):
            if i not in sql_goods_id_list:
                goods_id_list.append(i)
        log.debug(f'goods_id_list: {goods_id_list}')
        for goods_id in goods_id_list:
            try:
                get_product_detail(log, goods_id, sql_pool)
            except Exception as e:
                log.error(
                    f"{inspect.currentframe().f_code.co_name} Request get_product_detail for goods_id:{goods_id} error: {e}")
            # Pause briefly so we do not query too frequently
            time.sleep(random.uniform(0.5, 1))
        log.success(f'Request product detail end.................................................')
        # Fetch player data: first select the products whose state is 0 in urbox_product_record
        log.debug(f'Request start for player list.........')
        sql_goods_id_list_player = sql_pool.select_all(
            query="SELECT goods_id FROM urbox_product_record WHERE state = 0")
        sql_goods_id_list_player = [item[0] for item in sql_goods_id_list_player]
        for goods_id in sql_goods_id_list_player:
            try:
                get_player_list(log, goods_id, sql_pool)
            except Exception as e:
                log.error(
                    f"{inspect.currentframe().f_code.co_name} Request get_player_list for goods_id:{goods_id} error: {e}")
        log.success(f'Request player list end.................................................')
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished, waiting for the next round of collection............')


def schedule_task():
    """
    Entry point for running the spider module as a scheduled task.
    """
    # Run the task once immediately
    urbox_id_main(log=logger)
    # Then schedule it to run every day
    schedule.every().day.at("00:01").do(urbox_id_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    # get_product_list()
    # get_product_detail(logger, 354, None)
    # get_player_list(logger, 350, None)
    # schedule_task()
    urbox_id_main(log=logger)