jp_one_piece_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/10/28 10:20
import inspect

import requests
import user_agent
from loguru import logger
from parsel import Selector
from tenacity import retry, stop_after_attempt, wait_fixed

import settings
from mysql_pool import MySQLConnectionPool

# Daily-rotated log files, keeping the last 7 days
logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")

headers = {
    "accept": "application/json, text/plain, */*",
    "user-agent": user_agent.generate_user_agent()
}

crawler_language = '日文'  # "Japanese"; written into every record
def after_log(retry_state):
    """
    tenacity retry callback, invoked after every attempt.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the decorated function's first argument, if any
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger  # fall back to the global logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
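
# Hypothetical illustration (not part of the crawler): with after=after_log,
# tenacity invokes the callback after every attempt, so each failure below
# would be logged as a warning before the next retry:
# @retry(stop=stop_after_attempt(3), wait=wait_fixed(1), after=after_log)
# def flaky(log):
#     raise ValueError("boom")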
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxy configuration for the Kuaidaili tunnel proxy.
    :param log: logger object
    :return: proxies dict in the format expected by requests
    """
    # NOTE: credentials are hardcoded here; moving them into settings or
    # environment variables would be safer.
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise
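
# get_proxys is defined but never called below; to route the crawl through the
# tunnel, the dict can be passed straight to requests, e.g.:
#   requests.get(url, headers=headers, proxies=get_proxys(logger), timeout=10)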
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_single_page(log, card_type_id, card_type_name, sql_pool):
    """
    Fetch one card-list page, parse every card on it, and save the records.
    :param log: logger object
    :param card_type_id: card series id
    :param card_type_name: card series name
    :param sql_pool: MySQL connection pool object
    """
    log.debug(f"Getting cardlist -> card_type_id:{card_type_id}, card_type_name:{card_type_name}")
    url = "https://onepiece-cardgame.com/cardlist/"
    params = {
        # "series": "569302"
        "series": card_type_id
    }
    response = requests.get(url, headers=headers, params=params, timeout=10)
    response.raise_for_status()
    selector = Selector(response.text)
    tag_a_list = selector.xpath('//div[@class="resultCol"]/a')
    info_list = []
    for tag_a in tag_a_list:
        # data-src on the <a> holds the anchor of the matching <dl>, e.g. "#OP01-001"
        card_id = tag_a.xpath('./@data-src').get().replace('#', '')
        # Images are lazy-loaded: the URL normally sits in data-src, falling back to src
        img = tag_a.xpath('./img/@data-src').get() or tag_a.xpath('./img/@src').get()
        card_img = 'https://onepiece-cardgame.com' + img.removeprefix('..')
        # Locate the <dl> whose id matches this card
        tag_dl = selector.xpath(f'//div[@class="resultCol"]/dl[@id="{card_id}"]')
        tag_backcol = tag_dl.xpath('./dd/div[@class="backCol"]')
        cost = tag_backcol.xpath('./div/div[@class="cost"]/text()').get()
        # .get() may return None, so guard before stripping
        attribute = (tag_backcol.xpath('./div/div[@class="attribute"]/text()').get() or '').strip()
        if not attribute:
            attribute = '-'
        power = tag_backcol.xpath('./div/div[@class="power"]/text()').get()
        counter = tag_backcol.xpath('./div/div[@class="counter"]/text()').get()
        color = tag_backcol.xpath('./div/div[@class="color"]/text()').get()
        block_icon = tag_backcol.xpath('./div/div[@class="block"]/text()').get()
        card_features = tag_backcol.xpath('.//div[@class="feature"]/text()').get()  # type
        card_text_desc = tag_backcol.xpath('./div[@class="text"]/text()').get()  # effect
        getInfo = tag_backcol.xpath('./div[@class="getInfo"]/text()').get()  # card set(s)
        card_name = tag_dl.xpath('./dt/div[@class="cardName"]/text()').get()
        card_number = tag_dl.xpath('./dt/div[@class="infoCol"]/span[1]/text()').get()
        card_rarity = tag_dl.xpath('./dt/div[@class="infoCol"]/span[2]/text()').get()
        detail_card_type = tag_dl.xpath('./dt/div[@class="infoCol"]/span[3]/text()').get()
        data_dict = {
            "card_type_id": card_type_id,
            "card_type_name": card_type_name,
            "card_id": card_id,
            "card_name": card_name,
            "card_number": card_number,
            "card_rarity": card_rarity,
            "detail_card_type": detail_card_type,
            "card_img": card_img,
            "card_life": cost,
            "card_attribute": attribute,
            "card_power": power,
            "card_attack": counter,
            "card_color": color,
            "subscript": block_icon,
            "card_features": card_features,
            "card_text_desc": card_text_desc,
            "card_offer_type": getInfo,
            "crawler_language": crawler_language
        }
        info_list.append(data_dict)
    # Save the whole page in one batch
    try:
        sql_pool.insert_many(table="one_piece_record", data_list=info_list)
    except Exception as e:
        log.error(f"Error inserting data: {e}")
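
# The XPath logic above assumes markup of roughly this shape (inferred from
# the selectors; the live page may differ):
#   <div class="resultCol">
#     <a data-src="#OP01-001"><img data-src="../images/..."></a>
#     <dl id="OP01-001">
#       <dt><div class="cardName">...</div>
#           <div class="infoCol"><span>number</span><span>rarity</span><span>type</span></div></dt>
#       <dd><div class="backCol">cost / attribute / power / counter / color /
#           block / feature / text / getInfo</div></dd>
#     </dl>
#   </div>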
@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def op_main(log):
    """
    Main entry point: crawl every configured card series.
    :param log: logger object
    """
    log.info(
        f'Starting crawler task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        for card_type_id, card_type_name in settings.JP_ONE_PIECE_CARDS.items():
            try:
                get_single_page(log, card_type_id, card_type_name, sql_pool)
            except Exception as e:
                log.error(f"Error getting cardlist: {e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')
if __name__ == '__main__':
    op_main(logger)
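
For reference, the script imports two local modules that are not part of this file. The sketch below shows only the minimal interface the code above relies on (settings.JP_ONE_PIECE_CARDS, MySQLConnectionPool(log=...), check_pool_health(), insert_many(table=..., data_list=...)); the series ids/names, connection details, and pooling choices are illustrative assumptions, not the project's actual files.

# settings.py (sketch) -- series id -> series name, iterated by op_main()
JP_ONE_PIECE_CARDS = {
    "569302": "EXAMPLE SERIES A",  # placeholder names; real values come from the site
    "569303": "EXAMPLE SERIES B",
}

# mysql_pool.py (sketch) -- assumes pymysql plus DBUtils' PooledDB
import pymysql
from dbutils.pooled_db import PooledDB

class MySQLConnectionPool:
    def __init__(self, log, host="127.0.0.1", port=3306, user="root",
                 password="", database="cards"):
        self.log = log
        self.pool = PooledDB(creator=pymysql, maxconnections=5, host=host,
                             port=port, user=user, password=password,
                             database=database, charset="utf8mb4")

    def check_pool_health(self):
        # The pool is considered healthy if a connection can run SELECT 1
        try:
            conn = self.pool.connection()
            with conn.cursor() as cursor:
                cursor.execute("SELECT 1")
            conn.close()
            return True
        except Exception as e:
            self.log.error(f"Pool health check failed: {e}")
            return False

    def insert_many(self, table, data_list):
        # Build one bulk INSERT from the dict keys of the first record
        if not data_list:
            return
        columns = list(data_list[0].keys())
        sql = (f"INSERT INTO {table} ({', '.join(columns)}) "
               f"VALUES ({', '.join(['%s'] * len(columns))})")
        conn = self.pool.connection()
        try:
            with conn.cursor() as cursor:
                cursor.executemany(sql, [tuple(d[c] for c in columns) for d in data_list])
            conn.commit()
        finally:
            conn.close()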