fan_pokemon_card_spider.py 11 KB

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/8/25 14:38
import inspect
from datetime import datetime

import requests
import user_agent
from loguru import logger
from parsel import Selector
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool

# Language tag written into every scraped row ("繁中" = Traditional Chinese site).
crawler_language = "繁中"

headers = {
    # "referer": "https://asia.pokemon-card.com/tw/card-search/list/",
    # Random User-Agent generated once at import time and reused for every request.
    "user-agent": user_agent.generate_user_agent()
}

# Replace loguru's default stderr sink with a daily-rotated file log kept 7 days.
logger.remove()
logger.add("./logs/fan_{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 day")

"""
expansion_series ->
"""
  25. def after_log(retry_state):
  26. """
  27. retry 回调
  28. :param retry_state: RetryCallState 对象
  29. """
  30. # 检查 args 是否存在且不为空
  31. if retry_state.args and len(retry_state.args) > 0:
  32. log = retry_state.args[0] # 获取传入的 logger
  33. else:
  34. log = logger # 使用全局 logger
  35. if retry_state.outcome.failed:
  36. log.warning(
  37. f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
  38. else:
  39. log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
  40. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  41. def get_proxys(log):
  42. """
  43. 获取代理
  44. :return: 代理
  45. """
  46. tunnel = "x371.kdltps.com:15818"
  47. kdl_username = "t13753103189895"
  48. kdl_password = "o0yefv6z"
  49. try:
  50. proxies = {
  51. "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
  52. "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
  53. }
  54. return proxies
  55. except Exception as e:
  56. log.error(f"Error getting proxy: {e}")
  57. raise e
  58. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  59. def get_category_list(log, sql_pool):
  60. log.debug(f'Request get_category_list.........................')
  61. url = 'https://asia.pokemon-card.com/tw/card-search/'
  62. page = 1
  63. max_page = 100
  64. while page <= max_page:
  65. params = {
  66. # "pageNo": "2"
  67. "pageNo": page
  68. }
  69. response = requests.get(url, headers=headers, params=params, timeout=10)
  70. response.raise_for_status()
  71. selector = Selector(response.text)
  72. tag_li_list = selector.xpath('//ul[@class="expansionList"]/li')
  73. info_list = []
  74. for tag_li in tag_li_list:
  75. expansionLink = tag_li.xpath('./a/@href').get()
  76. expansion_link = f'https://asia.pokemon-card.com{expansionLink}' if expansionLink else None
  77. expansion_img = tag_li.xpath('./a//img/@src').get()
  78. expansion_series = tag_li.xpath('./a//div[@class="seriesBlock"]/span/text()').get()
  79. expansion_title = tag_li.xpath('./a//div[@class="titleBlock"]/h3/text()').get()
  80. expansion_title = expansion_title.strip() if expansion_title else None
  81. expansion_release_time = tag_li.xpath('./a//div[@class="titleBlock"]/time/@datetime').get()
  82. data_dict = {
  83. "expansion_link": expansion_link,
  84. "expansion_img": expansion_img,
  85. "expansion_series": expansion_series,
  86. "expansion_title": expansion_title,
  87. "expansion_release_time": expansion_release_time,
  88. "crawler_language": crawler_language
  89. }
  90. # print(data_dict)
  91. info_list.append(data_dict)
  92. if info_list:
  93. sql_pool.insert_many(table="pokemon_fanz_category", data_list=info_list, ignore=True)
  94. if not tag_li_list:
  95. log.debug(f'not tag_li_list!!! page: {page}!!!!!!!!!!')
  96. break
  97. if len(tag_li_list) < 20:
  98. log.debug(
  99. f'--------------- page {page} has {len(tag_li_list)} items, [len(tag_li_list) < 20] ->->-> break ---------------')
  100. break
  101. page += 1
  102. # -----------------------------------------------------------------------------------------------------------------------
  103. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  104. def get_list_single_page(log, sql_pool, page, cate_tuple):
  105. # expansion_link,expansion_series,expansion_title,expansion_release_time
  106. url = cate_tuple[0]
  107. expansion_series = cate_tuple[1]
  108. expansion_title = cate_tuple[2]
  109. expansion_release_time = cate_tuple[3]
  110. # expansion_release_time 为'03-28-2025'格式 转换为正常的 年月日
  111. date_obj = datetime.strptime(expansion_release_time, '%m-%d-%Y')
  112. expansion_release_time = date_obj.strftime('%Y-%m-%d')
  113. log.debug(f'Request get_list_single_page for page: {page}')
  114. # url = "https://asia.pokemon-card.com/tw/card-search/list/"
  115. params = {
  116. # "pageNo": "2"
  117. "pageNo": page
  118. }
  119. # response = requests.get(url, headers=headers, params=params, timeout=10, proxies=get_proxys(log))
  120. response = requests.get(url, headers=headers, params=params, timeout=10)
  121. # print(response.text)
  122. response.raise_for_status()
  123. selector = Selector(response.text)
  124. tag_li_list = selector.xpath('//*[@id="searchForm"]//ul/li')
  125. info_list = []
  126. for tag_li in tag_li_list:
  127. detail_url_str = tag_li.xpath('./a/@href').get()
  128. card_id = detail_url_str.split('/')[-2]
  129. detail_url = f"https://asia.pokemon-card.com{detail_url_str}"
  130. img = tag_li.xpath('.//img/@src').get()
  131. if not img:
  132. img = tag_li.xpath('.//img/@data-original').get()
  133. data_dict = {
  134. "card_id": card_id,
  135. "major_category_name": expansion_series,
  136. "pg_label": expansion_title,
  137. "sales_date": expansion_release_time,
  138. "detail_url": detail_url,
  139. "img": img,
  140. "crawler_language": crawler_language
  141. }
  142. # print(data_dict)
  143. info_list.append(data_dict)
  144. if info_list:
  145. sql_pool.insert_many(table="pokemon_card_record", data_list=info_list, ignore=True)
  146. return len(tag_li_list)
  147. def get_data_list(log, sql_pool, cate_tuple):
  148. page = 1
  149. max_page = 600
  150. while page <= max_page:
  151. try:
  152. log.debug(
  153. f'--------------- {inspect.currentframe().f_code.co_name}, page {page}, start ---------------')
  154. len_items = get_list_single_page(log, sql_pool, page, cate_tuple)
  155. except Exception as e:
  156. log.error(
  157. f"{inspect.currentframe().f_code.co_name} Request get_list_single_page for page:{page}, {e}")
  158. len_items = 0
  159. if len_items < 20:
  160. log.debug(f'--------------- page {page} has {len_items} items, break ---------------')
  161. break
  162. if page > 50:
  163. log.debug(f'--------------- page {page} has {len_items} items, [page > 50] ->->-> break ---------------')
  164. break
  165. page += 1
  166. # ----------------------------------------------------------------------------------------------------------------------
  167. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  168. def get_details(log, sql_id_detail_url: tuple, sql_pool):
  169. log.debug(f'Request get_details for sql_id_detail_url: {sql_id_detail_url}')
  170. # url = "https://asia.pokemon-card.com/tw/card-search/detail/13958/"
  171. url = sql_id_detail_url[1]
  172. # response = requests.get(url, headers=headers, timeout=10, proxies=get_proxys(log))
  173. response = requests.get(url, headers=headers, timeout=10)
  174. # print(response.text)
  175. response.raise_for_status()
  176. selector = Selector(response.text)
  177. card_name = selector.xpath('//div[@class="wrapper"]/header/h1/text()').getall()
  178. card_name = ''.join(card_name) if card_name else None
  179. card_name = card_name.strip() if card_name else None
  180. card_no = selector.xpath('//div[@class="wrapper"]//span[@class="collectorNumber"]/text()').get()
  181. card_no = card_no.strip() if card_no else None
  182. data_dict = {
  183. "card_name": card_name,
  184. "card_no": card_no
  185. }
  186. # print(data_dict)
  187. sql_pool.update_one_or_dict(
  188. table="pokemon_card_record",
  189. data=data_dict,
  190. condition={"id": sql_id_detail_url[0]}
  191. )
  192. @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
  193. def fz_pokemon_main(log):
  194. """
  195. 主函数
  196. """
  197. log.info(f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务.............................................')
  198. # 配置 MySQL 连接池
  199. sql_pool = MySQLConnectionPool(log=log)
  200. if not sql_pool.check_pool_health():
  201. log.error("数据库连接池异常")
  202. raise RuntimeError("数据库连接池异常")
  203. try:
  204. # 获取分类列表
  205. log.debug(".......... 获取分类列表 ..........")
  206. try:
  207. get_category_list(logger, sql_pool)
  208. except Exception as e:
  209. log.error(f"{inspect.currentframe().f_code.co_name} Request get_category_list error: {e}")
  210. # 获取每个分类下的 产品列表
  211. sql_cate_list = sql_pool.select_all(
  212. f"SELECT expansion_link,expansion_series,expansion_title,expansion_release_time FROM pokemon_fanz_category WHERE crawler_language='{crawler_language}'")
  213. # sql_cate_list = [x[0] for x in sql_cate_list]
  214. for cate_tuple in sql_cate_list:
  215. try:
  216. # 获取商品列表 https://asia.pokemon-card.com/tw/card-search/list/?expansionCodes=M1L
  217. log.debug(f'Request get_data_list for cate: {cate_tuple}')
  218. get_data_list(logger, sql_pool, cate_tuple)
  219. except Exception as e:
  220. log.error(f"{inspect.currentframe().f_code.co_name} Request get_data_list error: {e}")
  221. # 获取商品详情
  222. log.debug(f"........... 获取商品详情 ..........")
  223. sql_ietm_id_list = sql_pool.select_all(
  224. f"SELECT id, detail_url FROM pokemon_card_record WHERE card_name IS NULL AND crawler_language='{crawler_language}'")
  225. for item_id in sql_ietm_id_list:
  226. try:
  227. get_details(log, item_id, sql_pool)
  228. except Exception as e:
  229. log.error(f"Request get_details error: {e}")
  230. except Exception as e:
  231. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  232. finally:
  233. log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
if __name__ == '__main__':
    # Ad-hoc single-function test calls, kept for reference:
    # get_list_single_page(logger, None, 1, 'https://asia.pokemon-card.com/tw/card-search/list/?expansionCodes=M1L')
    # get_details(logger, (None, None), None)
    # get_category_list(logger)
    fz_pokemon_main(logger)