# jz_one_piece_spider.py
# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/10/24 10:20
import user_agent
import requests
import inspect
from loguru import logger
from tenacity import retry, stop_after_attempt, wait_fixed
from mysql_pool import MySQLConnectionPool

# --- Module-level configuration --------------------------------------------
# Replace loguru's default stderr sink with a daily-rotating file sink
# (new file at midnight, DEBUG level and up).
logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 day")  # NOTE(review): loguru docs show "7 days" — confirm "7 day" parses as intended

# Shared HTTP headers for every request; a random desktop User-Agent is
# generated once per process start.
headers = {
    "accept": "application/json, text/plain, */*",
    "user-agent": user_agent.generate_user_agent()
}

# Language tag stored with every scraped record (Simplified Chinese site).
crawler_language = '简中'
  20. def after_log(retry_state):
  21. """
  22. retry 回调
  23. :param retry_state: RetryCallState 对象
  24. """
  25. # 检查 args 是否存在且不为空
  26. if retry_state.args and len(retry_state.args) > 0:
  27. log = retry_state.args[0] # 获取传入的 logger
  28. else:
  29. log = logger # 使用全局 logger
  30. if retry_state.outcome.failed:
  31. log.warning(
  32. f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
  33. else:
  34. log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
  35. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  36. def get_proxys(log):
  37. """
  38. 获取代理
  39. :return: 代理
  40. """
  41. tunnel = "x371.kdltps.com:15818"
  42. kdl_username = "t13753103189895"
  43. kdl_password = "o0yefv6z"
  44. try:
  45. proxies = {
  46. "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
  47. "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
  48. }
  49. return proxies
  50. except Exception as e:
  51. log.error(f"Error getting proxy: {e}")
  52. raise e
  53. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  54. def get_cachelist(log, sql_pool):
  55. """
  56. 获取 cachelist 选项数据
  57. :param log: 日志对象
  58. :param sql_pool: MySQL连接池对象
  59. """
  60. log.debug("Getting cachelist ..........................")
  61. url = "https://onepieceserve.windoent.com/cardType/cardofferingtype/cachelist"
  62. response = requests.get(url, headers=headers)
  63. response.raise_for_status()
  64. resp_json = response.json()
  65. if resp_json["code"] == 0:
  66. resp_json_list = resp_json["list"]
  67. for item in resp_json_list:
  68. try:
  69. log.debug(f"Getting cachelist: {item['name']}")
  70. card_type_name = item.get("name")
  71. card_type_id = item.get("id")
  72. # card_type_position = item.get("position")
  73. get_all_page(log, card_type_id, card_type_name, sql_pool)
  74. except Exception as e:
  75. log.error(f"Error getting cachelist: {e}")
  76. else:
  77. log.error(f"Error getting cachelist: {resp_json['message']}")
  78. raise Exception(resp_json["message"])
  79. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  80. def get_single_page(log, page, card_type_name):
  81. """
  82. 获取单页数据
  83. :param log: 日志对象
  84. :param page: 页码
  85. :param card_type_name: 卡牌类型
  86. :return: 响应JSON数据
  87. """
  88. url = "https://onepieceserve.windoent.com/cardList/cardlist/weblist"
  89. params = {
  90. "cardOfferType": card_type_name,
  91. "cardColor": "",
  92. "cardType": "",
  93. "cardCartograph": "",
  94. "subscript": "",
  95. "limit": "20",
  96. "page": str(page)
  97. }
  98. response = requests.get(url, headers=headers, params=params)
  99. response.raise_for_status()
  100. resp_json = response.json()
  101. if resp_json["code"] == 0:
  102. return resp_json
  103. else:
  104. log.error(f"Error getting page {page}: {resp_json['message']}")
  105. raise Exception(resp_json["message"])
  106. def get_all_page(log, card_type_id, card_type_name, sql_pool):
  107. """
  108. 获取所有页面数据
  109. :param log: 日志对象
  110. :param card_type_id: 卡牌id
  111. :param card_type_name: 卡牌类型
  112. :param sql_pool: MySQL连接池对象
  113. """
  114. log.debug("Getting all single page ..........................")
  115. page = 1
  116. total_page = 1
  117. while page <= total_page:
  118. log.debug(f"Getting single page: {page}")
  119. try:
  120. resp_json = get_single_page(log, page, card_type_name)
  121. # 更新总页数
  122. total_page = resp_json["page"]["totalPage"]
  123. log.debug(f"Total pages: {total_page}, Current page: {page}")
  124. resp_json_list = resp_json.get("page", {}).get("list", [])
  125. for item in resp_json_list:
  126. card_id = item.get("id")
  127. # card_img = item.get("cardImg")
  128. try:
  129. get_detail(log, card_type_id, card_type_name, card_id, sql_pool)
  130. except Exception as e:
  131. log.error(f"Error getting detail: {e}")
  132. if len(resp_json_list) < 20:
  133. log.debug(
  134. f"No more data, total pages: {total_page}, current page: {page}, len_resp_json_list:{len(resp_json_list)}, break !!!")
  135. break
  136. page += 1
  137. except Exception as e:
  138. log.error(f"Error getting page {page}: {e}")
  139. raise e
  140. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  141. def get_detail(log, card_type_id, card_type_name, card_id, sql_pool):
  142. """
  143. 获取卡片详情信息
  144. :param log: 日志对象
  145. :param card_type_id: 卡片类型 id
  146. :param card_type_name: 卡片类型 名称
  147. :param card_id: 卡片 id
  148. :param sql_pool: MySQL连接池对象
  149. """
  150. log.debug(f"Getting detail: {card_id} >>>>>>>>>>>>>>>>>>>>")
  151. url = f"https://onepieceserve.windoent.com/cardList/cardlist/webInfo/{card_id}"
  152. response = requests.get(url, headers=headers)
  153. response.raise_for_status()
  154. resp_json = response.json()
  155. if resp_json["code"] == 0:
  156. resp_json_info = resp_json.get("info")
  157. card_name = resp_json_info.get("cardName") # 名称
  158. card_number = resp_json_info.get("cardNumber") # 编号
  159. card_rarity = resp_json_info.get("cardRarity") # 稀有度
  160. detail_card_type = resp_json_info.get("cardType") # 类型
  161. card_img = resp_json_info.get("cardImg") # 图片
  162. card_life = resp_json_info.get("cardLife") # 费用
  163. cardAttribute = resp_json_info.get("cardAttribute", []) # 属性
  164. card_attribute = "|".join(cardAttribute) if cardAttribute else ""
  165. card_power = resp_json_info.get("cardPower") # 力量
  166. card_attack = resp_json_info.get("cardAttack") # 反击值
  167. card_color = resp_json_info.get("cardColor") # 颜色
  168. subscript = resp_json_info.get("subscript") # 角标
  169. card_features = resp_json_info.get("cardFeatures") # 特征
  170. card_text_desc = resp_json_info.get("cardTextDesc") # 效果
  171. card_offer_type = resp_json_info.get("cardOfferType") # 获取方式
  172. data_dict = {
  173. "card_type_id": card_type_id,
  174. "card_type_name": card_type_name,
  175. "card_id": card_id,
  176. "card_name": card_name,
  177. "card_number": card_number,
  178. "card_rarity": card_rarity,
  179. "detail_card_type": detail_card_type,
  180. "card_img": card_img,
  181. "card_life": card_life,
  182. "card_attribute": card_attribute,
  183. "card_power": card_power,
  184. "card_attack": card_attack,
  185. "card_color": card_color,
  186. "subscript": subscript,
  187. "card_features": card_features,
  188. "card_text_desc": card_text_desc,
  189. "card_offer_type": card_offer_type,
  190. "crawler_language": crawler_language
  191. }
  192. # print(data_dict)
  193. sql_pool.insert_one_or_dict(table="one_piece_record", data=data_dict)
  194. else:
  195. log.error(f"Error getting detail: {resp_json['message']}")
  196. raise Exception(resp_json["message"])
  197. @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
  198. def op_main(log):
  199. """
  200. 主函数
  201. :param log: logger对象
  202. """
  203. log.info(
  204. f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
  205. # 配置 MySQL 连接池
  206. sql_pool = MySQLConnectionPool(log=log)
  207. if not sql_pool.check_pool_health():
  208. log.error("数据库连接池异常")
  209. raise RuntimeError("数据库连接池异常")
  210. try:
  211. get_cachelist(log, sql_pool)
  212. except Exception as e:
  213. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  214. finally:
  215. log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
  216. if __name__ == '__main__':
  217. # get_single_page(logger, 1)
  218. # get_cachelist(logger)
  219. op_main(logger)