# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date   : 2025/10/27 10:20
import inspect

import requests
import user_agent
from loguru import logger
from parsel import Selector
from tenacity import retry, stop_after_attempt, wait_fixed

import settings
from mysql_pool import MySQLConnectionPool

logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")
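# rotation="00:00" starts a new log file at midnight; retention="7 days" lets
# loguru delete files older than a week, so at most ~7 daily logs are kept.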

headers = {
    "accept": "application/json, text/plain, */*",
    "user-agent": user_agent.generate_user_agent()
}
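# generate_user_agent() is evaluated once at module import, so every request
# made by this process reuses the same randomly generated User-Agent string.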

crawler_language = '英文'  # literal DB value ("English"); kept verbatim so stored rows stay consistent


def after_log(retry_state):
    """
    Retry callback.
    :param retry_state: tenacity RetryCallState object
    """
    # Use the logger passed as the wrapped function's first positional
    # argument if present; otherwise fall back to the module-level logger.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
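# Note: tenacity only invokes the `after` hook when an attempt failed and
# another retry will follow, so in practice the success branch above rarely
# runs (observed behavior in current tenacity releases; kept for completeness).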


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the tunnel-proxy configuration.
    :param log: logger object
    :return: proxies dict in the format requests expects
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise
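# Note: get_proxys() is never actually called below -- get_single_page() sends
# its request without a proxies= argument. If the tunnel proxy is meant to be
# used, the wiring would look roughly like this (hypothetical):
#     response = requests.get(url, headers=headers, params=params,
#                             proxies=get_proxys(log), timeout=10)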


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_single_page(log, card_type_id, card_type_name, sql_pool):
    """
    Fetch one card-list page and store its rows.
    :param log: logger object
    :param card_type_id: card series id (the `series` query parameter)
    :param card_type_name: card series name
    :param sql_pool: MySQL connection-pool object
    :return: None (parsed rows are written straight to MySQL)
    """
    log.debug(f"Getting cardlist -> card_type_id:{card_type_id}, card_type_name:{card_type_name}")
    url = "https://en.onepiece-cardgame.com/cardlist/"
    params = {
        "series": card_type_id  # e.g. "569302"
    }
    response = requests.get(url, headers=headers, params=params, timeout=10)
    response.raise_for_status()
    selector = Selector(response.text)
    tag_a_list = selector.xpath('//div[@class="resultCol"]/a')
    info_list = []
    for tag_a in tag_a_list:
        card_id = tag_a.xpath('./@data-src').get()
        card_id = card_id.replace('#', '')
        # Lazy-loaded images keep the real URL in @data-src; try @src first,
        # then fall back.
        img = tag_a.xpath('./img/@src').get()
        if not img:
            img = tag_a.xpath('./img/@data-src').get()
        card_img = 'https://en.onepiece-cardgame.com' + img.removeprefix('..')
        # Find the <dl> whose id matches this card_id
        tag_dl = selector.xpath(f'//div[@class="resultCol"]/dl[@id="{card_id}"]')
        tag_backcol = tag_dl.xpath('./dd/div[@class="backCol"]')
        cost = tag_backcol.xpath('./div/div[@class="cost"]/text()').get()
        # .get() may return None, so guard before stripping
        attribute = (tag_backcol.xpath('./div/div[@class="attribute"]/text()').get() or '').strip()
        if not attribute:
            attribute = '-'
        power = tag_backcol.xpath('./div/div[@class="power"]/text()').get()
        counter = tag_backcol.xpath('./div/div[@class="counter"]/text()').get()
        color = tag_backcol.xpath('./div/div[@class="color"]/text()').get()
        block_icon = tag_backcol.xpath('./div/div[@class="block"]/text()').get()
        card_features = tag_backcol.xpath('.//div[@class="feature"]/text()').get()  # Type
        card_text_desc = tag_backcol.xpath('./div[@class="text"]/text()').get()  # Effect
        getInfo = tag_backcol.xpath('./div[@class="getInfo"]/text()').get()  # Card Set(s)
        card_name = tag_dl.xpath('./dt/div[@class="cardName"]/text()').get()
        card_number = tag_dl.xpath('./dt/div[@class="infoCol"]/span[1]/text()').get()
        card_rarity = tag_dl.xpath('./dt/div[@class="infoCol"]/span[2]/text()').get()
        detail_card_type = tag_dl.xpath('./dt/div[@class="infoCol"]/span[3]/text()').get()
        # Map the scraped fields onto the project's schema (note: the site's
        # "cost" is stored as card_life and "counter" as card_attack here).
        data_dict = {
            "card_type_id": card_type_id,
            "card_type_name": card_type_name,
            "card_id": card_id,
            "card_name": card_name,
            "card_number": card_number,
            "card_rarity": card_rarity,
            "detail_card_type": detail_card_type,
            "card_img": card_img,
            "card_life": cost,
            "card_attribute": attribute,
            "card_power": power,
            "card_attack": counter,
            "card_color": color,
            "subscript": block_icon,
            "card_features": card_features,
            "card_text_desc": card_text_desc,
            "card_offer_type": getInfo,
            "crawler_language": crawler_language
        }
        info_list.append(data_dict)
    # Persist the whole page in one batch insert
    sql_pool.insert_many(table="one_piece_record", data_list=info_list)
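# insert_many() comes from the project's own mysql_pool module; it is assumed
# here to build a parameterized INSERT from each dict's keys. A minimal sketch
# of the expected behavior (hypothetical -- the real implementation may differ):
#     keys = ','.join(data_list[0])
#     placeholders = ','.join(['%s'] * len(data_list[0]))
#     cursor.executemany(f"INSERT INTO {table} ({keys}) VALUES ({placeholders})",
#                        [tuple(d.values()) for d in data_list])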


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def op_main(log):
    """
    Main entry point.
    :param log: logger object
    """
    log.info(
        f'Starting crawler task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        for card_type_id, card_type_name in settings.EN_ONE_PIECE_CARDS.items():
            try:
                get_single_page(log, card_type_id, card_type_name, sql_pool)
            except Exception as e:
                log.error(f"Error getting cardlist: {e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next collection run............')
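# Note: because op_main() catches its own exceptions, the retry decorator above
# only re-runs it when the pool health check raises RuntimeError; it does not
# act as an hourly scheduler for runs that complete normally.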


if __name__ == '__main__':
    op_main(logger)
|