# -*- coding: utf-8 -*- # Author : Charley # Python : 3.10.8 # Date : 2025/12/8 19:23 import random import time import inspect import requests import schedule import user_agent from loguru import logger from datetime import datetime from mysql_pool import MySQLConnectionPool from tenacity import retry, stop_after_attempt, wait_fixed """ 沃瑞斯外部 """ logger.remove() logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00", format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}", level="DEBUG", retention="7 day") def after_log(retry_state): """ retry 回调 :param retry_state: RetryCallState 对象 """ # 检查 args 是否存在且不为空 if retry_state.args and len(retry_state.args) > 0: log = retry_state.args[0] # 获取传入的 logger else: log = logger # 使用全局 logger if retry_state.outcome.failed: log.warning( f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times") else: log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded") @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log) def get_proxys(log): """ 获取代理 :return: 代理 """ tunnel = "x371.kdltps.com:15818" kdl_username = "t13753103189895" kdl_password = "o0yefv6z" try: proxies = { "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}, "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel} } return proxies except Exception as e: log.error(f"Error getting proxy: {e}") raise e @retry(stop=stop_after_attempt(5), wait=wait_fixed(5), after=after_log) def get_single_page(log, page, sql_pool): """ 获取单页商品 :param log: logger对象 :param page: 页码 :param sql_pool: MySQL连接池对象 :return: ietms 长度 """ log.debug(f"............... 开始获取第 {page} 页数据 ...............") headers = { "content-type": "application/json", "referer": "https://veriswap.com/", "user-agent": user_agent.generate_user_agent() } url = "https://veriswap-backend-689107296832.us-central1.run.app/cards/get/all" data = { "query": "", "page": page, "filterBy": "isHidden:=false", "sortBy": "updatedAt:desc" } response = requests.post(url, headers=headers, json=data, timeout=22) # print(response.json()) response.raise_for_status() resp_json = response.json().get("items", []) if not resp_json: log.debug("没有数据........") log.debug(response.json()) return 0 info_list = [] for item in resp_json: card_id = item.get("cardId") card_name = item.get("cardName") condition = item.get("condition") created_at = item.get("createdAt") # 1747107593 created_at = datetime.fromtimestamp(created_at).strftime('%Y-%m-%d %H:%M:%S') updated_at = item.get("updatedAt") updated_at = datetime.fromtimestamp(updated_at).strftime('%Y-%m-%d %H:%M:%S') era = item.get("era") for_sale = item.get("forSale") images = item.get("images", {}) img_front = images.get("front") img_back = images.get("back") is_vaulted = item.get("isVaulted") parallel = item.get("parallel") player_name = item.get("playerName") price = item.get("price") print_run = item.get("printRun") set_name = item.get("set") sport = item.get("sport") card_type = item.get("type") user_id = item.get("userId") year = item.get("year") data_dict = { "card_id": card_id, "card_name": card_name, "card_condition": condition, "created_at": created_at, "updated_at": updated_at, "era": era, "for_sale": for_sale, "img_front": img_front, "img_back": img_back, "is_vaulted": is_vaulted, "parallel": parallel, "player_name": player_name, "price": price, "print_run": print_run, "set_name": set_name, "sport": sport, "card_type": card_type, "user_id": user_id, "year": year } # log.debug(data_dict) info_list.append(data_dict) # 保存数据 if info_list: sql_pool.insert_many(table="veriswap_card_record", data_list=info_list, ignore=True) return len(resp_json) def get_all_page(log, sql_pool): """ 获取所有页码商品 翻页 :param log: logger对象 :param sql_pool: MySQL连接池对象 """ # page = 1 page = 16903 max_page = 20000 while page <= max_page: try: len_list = get_single_page(log, page, sql_pool) except Exception as e: log.error(f'Request get_single_page error: {e}') len_list = 0 if len_list < 20: log.debug(f'当前页码为: {page}, 数据长度为: {len_list}, 停止爬取 !!!') break page += 1 # time.sleep(random.uniform(1, 2)) @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log) def wap_main(log): """ 主函数 自动售货机 :param log: logger对象 """ log.info( f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................') # 配置 MySQL 连接池 sql_pool = MySQLConnectionPool(log=log) if not sql_pool.check_pool_health(): log.error("数据库连接池异常") raise RuntimeError("数据库连接池异常") try: try: log.debug('------------------- 获取所有页码商品 -------------------') get_all_page(log, sql_pool) except Exception as e: log.error(f'get_all_page error: {e}') except Exception as e: log.error(f'{inspect.currentframe().f_code.co_name} error: {e}') finally: log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............') def schedule_task(): """ 爬虫模块 定时任务 的启动文件 """ # 立即运行一次任务 # wap_main(log=logger) # 设置定时任务 schedule.every().day.at("00:01").do(wap_main, log=logger) while True: schedule.run_pending() time.sleep(1) if __name__ == '__main__': wap_main(logger) # get_all_page(logger, None)