# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/10/31 13:48
import inspect
import random
import time

import requests
import user_agent
from loguru import logger
from parsel import Selector
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool

"""
https://gatherer.wizards.com/sets
"""

# logger.remove()
# logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
#            format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
#            level="DEBUG", retention="7 days")

headers = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    # "referer": "https://gatherer.wizards.com/M20/en-us/1/aerial-assault",
    "user-agent": user_agent.generate_user_agent()
}


def after_log(retry_state):
    """
    tenacity retry callback.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the first positional argument if available,
    # otherwise fall back to the global loguru logger.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxy configuration (KDL tunnel proxy).
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_sets(log, sql_pool):
    """
    Fetch all set information (paged, 30 sets per page).
    :param log: logger object
    :param sql_pool: MySQL connection pool object
    :return: None
    """
    log.debug('Start fetching set information ->->->->->->->->->->->->->->->->->->->')
    for page in range(1, 20):
        log.debug(f" ------------------------- Current page: {page} -------------------------")
        url = f"https://gatherer.wizards.com/sets?page={page}"
        response = requests.get(url, headers=headers)
        # print(response.text)
        response.raise_for_status()
        selector = Selector(response.text)
        tag_tr_list = selector.xpath('//table[@data-testid="setResultsTable"]/tbody/tr')
        info_list = []
        for tag_tr in tag_tr_list:
            set_name = tag_tr.xpath('./td[2]/a/text()').get()
            set_url = tag_tr.xpath('./td[2]/a/@href').get()
            if set_url:
                set_url = "https://gatherer.wizards.com" + set_url
            set_code = tag_tr.xpath('./td[3]/text()').get()
            card_count = tag_tr.xpath('./td[4]/text()').get()
            release_date = tag_tr.xpath('./td[5]/text()').get()
            data_dict = {
                "set_name": set_name,
                "set_url": set_url,
                "set_code": set_code,
                "card_count": card_count,
                "release_date": release_date
            }
            print(data_dict)
            info_list.append(data_dict)
        # Save the data
        # sql_pool.insert_many(table="magic_sets_record", data_list=info_list, ignore=True)
        if len(tag_tr_list) < 30:
            log.debug(f"Current page {page} has only {len(tag_tr_list)} sets, no more data.........")
            break
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_cards_list(log, sql_pool, card_url):
    """
    Card list for a set, paged at 72 cards per page,
    e.g. https://gatherer.wizards.com/sets/SPM?page=2
    :param log:
    :param sql_pool:
    :param card_url:
    """
    log.debug('Start fetching card list information ->->->->->->->->->->->->->->->->->->->')
    for page in range(1, 50):
        card_list_url = f"{card_url}?page={page}"
        log.debug(f" ------------------------- Current page: {page} -------------------------")
        log.debug(f" ------------------------- Current URL: {card_list_url} -------------------------")
        response = requests.get(card_list_url, headers=headers, timeout=5)
        # response = requests.get(card_list_url, headers=headers, proxies=get_proxys(log))
        response.raise_for_status()
        selector = Selector(response.text)
        tag_div_list = selector.xpath('//div[@data-testid="resultsListWrapper"]/div')
        info_list = []
        for tag_div in tag_div_list:
            # card_name = tag_div.xpath('./div[@data-testid="cardName"]/a/text()').get()
            card_detail_url = tag_div.xpath('./a/@href').get()
            if card_detail_url:
                card_detail_url = "https://gatherer.wizards.com" + card_detail_url
            info_list.append({
                "card_url": card_url,
                "card_detail_url": card_detail_url
            })
        # Save the data
        print(info_list)
        # sql_pool.insert_many(table="magic_cards_record", data_list=info_list, ignore=True)
        if len(tag_div_list) < 72:
            log.debug(f"Current page {page} has only {len(tag_div_list)} cards, no more data.........")
            break


def get_card_detail(log, sql_pool, card_detail_url_tuple):
    """
    Card detail page.
    :param log:
    :param sql_pool:
    :param card_detail_url_tuple: (row id, card detail URL)
    """
    log.debug('Start fetching card <detail> information ->->->->->->->->->->->->->->->->->->->')
    sql_id = card_detail_url_tuple[0]
    card_detail_url = card_detail_url_tuple[1]
    response = requests.get(card_detail_url, headers=headers)
    response.raise_for_status()
    selector = Selector(response.text)
    tag_detail_section = selector.xpath('//section[@data-testid="cardDetailsWrapper"]')
    card_name = tag_detail_section.xpath('.//h1[@data-testid="cardDetailsCardName"]/text()').get()
    card_type = tag_detail_section.xpath('.//h1[@data-testid="cardDetailsTypeLine"]/text()').get()
    card_rarity = tag_detail_section.xpath('.//h1[@data-testid="cardDetailsRarity"]/text()').get()
    card_artist = tag_detail_section.xpath('./article[3]/div[1]/div[1]/section//h1/text()').get()  # Artist
    card_p_t = tag_detail_section.xpath('./article[3]/div[1]/div[2]/section/section/div//text()').getall()  # Power/toughness
    card_p_t = "".join(card_p_t).strip() if card_p_t else None
    card_set = tag_detail_section.xpath('./article[3]/div[2]/div[1]/section/a/h1//text()').getall()  # Set
    card_set = "".join(card_set).strip() if card_set else None
    card_number = tag_detail_section.xpath('.//h1[@data-testid="cardDetailsCardNumber"]/text()').get()  # Card number
    card_language = tag_detail_section.xpath('.//h1[@data-testid="cardDetailsLanguage"]/text()').get()
    detail_data_dict = {
        "card_name": card_name,
        "card_type": card_type,
        "card_rarity": card_rarity,
        "card_artist": card_artist,
        "card_p_t": card_p_t,
        "card_set": card_set,
        "card_number": card_number,
        "card_language": card_language
    }
    print(detail_data_dict)
    # Update the record
    # sql_pool.update_one_or_dict(table="magic_cards_record", data=detail_data_dict, condition={"id": sql_id})

    # Fetch other-language versions (see the sketch below)
    """
    First find out which languages are available,
    then substitute the locale segment in card_detail_url.
    """
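# A minimal sketch for the other-language TODO above. Assumptions (not
# confirmed by the pages scraped in this file): the detail URL embeds the
# locale as a path segment such as "en-us"
# (e.g. https://gatherer.wizards.com/M20/en-us/1/aerial-assault), and the
# set of locales to try is known in advance. build_language_urls and its
# default locale list are hypothetical placeholders, not part of the
# Gatherer site or this project's schema.
def build_language_urls(card_detail_url, languages=("en-us", "ja-jp", "de-de")):
    """
    Derive candidate detail URLs for other languages by swapping the
    locale segment of card_detail_url. Returns {locale: url}.
    """
    urls = {}
    for lang in languages:
        # Replace the "/en-us/" segment; if the base URL uses a different
        # locale, the URL is kept unchanged for that entry.
        urls[lang] = card_detail_url.replace("/en-us/", f"/{lang}/")
    return urls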
@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def magic_main(log):
    """
    Main entry point.
    :param log: logger object
    """
    log.info(f'Start running the {inspect.currentframe().f_code.co_name} crawler task....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        # Fetch all set information
        try:
            get_sets(log, sql_pool)
        except Exception as e:
            log.error(f"Error get_sets: {e}")
        # Fetch the card lists of every set
        # TODO: iterate over the set URLs (e.g. from magic_sets_record) instead of the placeholder ''
        try:
            get_cards_list(log, sql_pool, '')
        except Exception as e:
            log.error(f"Error get_cards: {e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished, waiting for the next collection round............')


if __name__ == '__main__':
    # magic_main(logger)
    # get_sets(logger, None)
    card_url_ = "https://gatherer.wizards.com/sets/SPM"
    # get_cards_list(logger, None, card_url_)
    # get_card_detail expects a (row id, detail URL) tuple; 0 is a placeholder id here
    get_card_detail(logger, None, (0, "https://gatherer.wizards.com/SPE/en-us/1/amateur-hero"))