psa_pop_spider.py
  1. # -*- coding: utf-8 -*-
  2. # Author : Charley
  3. # Python : 3.10.8
  4. # Date : 2025/9/15 14:53
  5. import inspect
  6. import random
  7. import time
  8. import schedule
  9. import user_agent
  10. from curl_cffi import requests
  11. from loguru import logger
  12. from parsel import Selector
  13. from mysql_pool import MySQLConnectionPool
  14. from tenacity import retry, stop_after_attempt, wait_fixed
  15. logger.remove()
  16. logger.add("logs/pop_{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
  17. format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
  18. level="DEBUG", retention="3 day")
  19. client_identifier_list = [
  20. "edge99", "edge101", "chrome99", "chrome100", "chrome101", "chrome104", "chrome107",
  21. "chrome110", "chrome116", "chrome119", "chrome120", "chrome123", "chrome124",
  22. "chrome99_android", "safari15_3", "safari15_5", "safari17_0", "safari17_2_ios"
  23. ]
  24. BASE_URL = 'https://www.psacard.com'
  25. headers = {
  26. "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
  27. 'User-Agent': user_agent.generate_user_agent()
  28. }
  29. category_link_list = {'Baseball Cards': 'https://www.psacard.com/pop/baseball-cards/20003',
  30. 'Baseball Coins': 'https://www.psacard.com/pop/baseball-coins/82797',
  31. 'Basketball Cards': 'https://www.psacard.com/pop/basketball-cards/20019',
  32. 'Basketball Coins': 'https://www.psacard.com/pop/basketball-coins/83007',
  33. 'Boxing / Wrestling Cards / MMA': 'https://www.psacard.com/pop/boxing-wrestling-cards-mma/20021',
  34. 'Football Cards': 'https://www.psacard.com/pop/football-cards/20014',
  35. 'Football Coins': 'https://www.psacard.com/pop/football-coins/83011',
  36. 'Golf Cards': 'https://www.psacard.com/pop/golf-cards/20023',
  37. 'Hockey Cards': 'https://www.psacard.com/pop/hockey-cards/20020',
  38. 'Hockey Coins': 'https://www.psacard.com/pop/hockey-coins/83012',
  39. 'Minor League Cards': 'https://www.psacard.com/pop/minor-league-cards/20031',
  40. 'Misc Cards': 'https://www.psacard.com/pop/misc-cards/20033',
  41. 'Multi-Sport Cards': 'https://www.psacard.com/pop/multi-sport-cards/20006',
  42. 'Multi-Sport Coins': 'https://www.psacard.com/pop/multi-sport-coins/102825',
  43. 'Non-Sport Cards': 'https://www.psacard.com/pop/non-sport-cards/20032',
  44. 'Non-Sport Coins': 'https://www.psacard.com/pop/non-sport-coins/82981',
  45. 'Packs': 'https://www.psacard.com/pop/packs/20017',
  46. 'Pins': 'https://www.psacard.com/pop/pins/20013',
  47. 'Soccer Cards': 'https://www.psacard.com/pop/soccer-cards/20004',
  48. 'TCG Cards': 'https://www.psacard.com/pop/tcg-cards/156940',
  49. 'Tickets': 'https://www.psacard.com/pop/tickets/20022',
  50. 'Game-Used Bats': 'https://www.psacard.com/pop/bats'}
  51. def after_log(retry_state):
  52. """
  53. retry 回调
  54. :param retry_state: RetryCallState 对象
  55. """
  56. # 检查 args 是否存在且不为空
  57. if retry_state.args and len(retry_state.args) > 0:
  58. log = retry_state.args[0] # 获取传入的 logger
  59. else:
  60. log = logger # 使用全局 logger
  61. if retry_state.outcome.failed:
  62. log.warning(
  63. f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
  64. else:
  65. log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
  66. @retry(stop=stop_after_attempt(5), wait=wait_fixed(2), after=after_log)
  67. def get_proxys(log):
  68. # 已购买账户 北美
  69. # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
  70. # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
  71. http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
  72. https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
  73. try:
  74. proxySettings = {
  75. "http": http_proxy,
  76. "https": https_proxy,
  77. }
  78. return proxySettings
  79. except Exception as e:
  80. log.error(f"Error getting proxy: {e}")
  81. raise e
  82. @retry(stop=stop_after_attempt(10), wait=wait_fixed(3), after=after_log)
  83. def get_detail_data(log, category, link, sql_pool):
  84. """
  85. 获取详情数据
  86. :param log:
  87. :param category:
  88. :param link:
  89. :param sql_pool:
  90. """
  91. try:
  92. with requests.Session() as session:
  93. resp = session.get(link, impersonate=random.choice(client_identifier_list), headers=headers,
  94. proxies=get_proxys(log), timeout=22, allow_redirects=False)
  95. # log.debug(resp.text)
  96. if 'Just a moment' in resp.text:
  97. log.debug('Just a moment , retrying.....')
  98. raise Exception('Just a moment')
  99. resp_selector = Selector(text=resp.text)
  100. tag_td_list = resp_selector.xpath(
  101. '//*[@id="tableCategory"]/thead/tr/td[@class="text-right"]/text() | //*[@id="tableBats"]/thead/tr/td[@class="text-right"]/text()').getall()
  102. if tag_td_list:
  103. # number_of_sets = tag_td_list[0]
  104. # total_items = tag_td_list[1]
  105. total_graded = tag_td_list[-1]
  106. if total_graded:
  107. total_graded = total_graded.replace(',', '')
  108. log.debug(f"Total Graded: {total_graded}")
  109. data_dict = {
  110. "category": category,
  111. "category_link": link,
  112. "total_graded": total_graded,
  113. "crawl_date": time.strftime("%Y-%m-%d", time.localtime())
  114. }
  115. # print(data_dict)
  116. try:
  117. sql_pool.insert_one_or_dict(table="psa_pop_record", data=data_dict, ignore=True)
  118. except Exception as e1:
  119. log.error(f"Error inserting data: {e1}")
  120. except Exception as e:
  121. log.error(f"Error getting detail data: {e}")
  122. raise e
  123. @retry(stop=stop_after_attempt(10), wait=wait_fixed(3), after=after_log)
  124. def get_pop_data(log, sql_pool):
  125. """
  126. 获取 pop 列表页数据
  127. :param log:
  128. :param sql_pool:
  129. """
  130. url = "https://www.psacard.com/pop"
  131. try:
  132. with requests.Session() as session:
  133. resp = session.get(url, impersonate=random.choice(client_identifier_list), headers=headers,
  134. proxies=get_proxys(log), timeout=22, allow_redirects=False)
  135. # log.debug(resp.text)
  136. if 'Just a moment' in resp.text:
  137. log.debug('Just a moment , retrying.....')
  138. raise Exception('Just a moment')
  139. resp_selector = Selector(text=resp.text)
  140. tag_a_list = resp_selector.xpath('//*[@id="mainContent"]/div[2]/div/a')
  141. for tag_a in tag_a_list:
  142. category = tag_a.xpath('./div/div/text()').get()
  143. category_link = tag_a.xpath('./@href').get()
  144. if 'https://' not in category_link:
  145. category_link = BASE_URL + category_link
  146. log.debug(f"Category: {category}, Link: {category_link}")
  147. try:
  148. get_detail_data(log, category, category_link, sql_pool)
  149. except Exception as e1:
  150. log.error(f"Error getting detail data: {e1}")
  151. except Exception as e:
  152. log.error(f"Error getting pop data: {e}")
  153. raise e
  154. @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
  155. def pop_main(log):
  156. """
  157. 主函数
  158. """
  159. log.info(
  160. f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
  161. # 配置 MySQL 连接池
  162. sql_pool = MySQLConnectionPool(log=log)
  163. if not sql_pool.check_pool_health():
  164. log.error("数据库连接池异常")
  165. raise RuntimeError("数据库连接池异常")
  166. try:
  167. log.debug(".......... 开始获取数据报告 ..........")
  168. # get_pop_data(log, sql_pool)
  169. for _ in range(2):
  170. for category, category_link in category_link_list.items():
  171. log.debug(f"{category}第一次查询, 开始获取数据.......")
  172. try:
  173. get_detail_data(log, category, category_link, sql_pool)
  174. except Exception as e1:
  175. log.error(f"Error getting detail data: {e1}")
  176. time.sleep(5)
  177. except Exception as e:
  178. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  179. finally:
  180. log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
  181. def schedule_task():
  182. """
  183. 两个爬虫模块的启动文件
  184. bidding_main
  185. weika_change_card_by_id_spider
  186. change_card_main
  187. """
  188. # 立即运行一次任务
  189. pop_main(log=logger)
  190. # 设置定时任务
  191. schedule.every().day.at("08:00").do(pop_main, log=logger)
  192. while True:
  193. schedule.run_pending()
  194. time.sleep(1)
  195. if __name__ == '__main__':
  196. # get_pop_data(logger)
  197. schedule_task()
  198. # get_detail_data(logger, '','https://www.psacard.com/pop/bats',None)