
feat(mysql): add MySQL connection pool module and PSA detail spider

- Add mysql_pool.py with a MySQL connection pool and a range of insert/update helpers
- Support single-row and batch inserts, falling back to per-row inserts when a batch fails
- Support single-row and batch updates, with condition dicts and raw SQL statements
- Add a connection-pool health check to keep database connections stable
- Add psa_pop_detail_spider.py, crawling PSA card detail data with Chromium and requests
- Implement proxy handling, paginated list crawling, retry on failure, and logging
- Crawl category, year, and player data and store it in structured form in the database
- Add application.yml with configurable MySQL and Fluent logging settings
charley, 1 month ago
Parent
Current commit
a6c160d447

+ 74 - 0
psa_spider/YamlLoader.py

@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+#
+import os, re
+import yaml
+
+regex = re.compile(r'^\$\{(?P<ENV>[A-Z_\-]+\:)?(?P<VAL>[\w\.]+)\}$')
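+# Matches ${ENV_VAR:default} style placeholders, e.g. ${MYSQL_HOST:127.0.0.1}
+# resolves to os.getenv('MYSQL_HOST', '127.0.0.1'); a bare ${value} keeps the literal value.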
+
+class YamlConfig:
+    def __init__(self, config):
+        self.config = config
+
+    def get(self, key:str):
+        return YamlConfig(self.config.get(key))
+    
+    def getValueAsString(self, key: str):
+        try:
+            match = regex.match(self.config[key])
+            group = match.groupdict()
+            if group['ENV'] is not None:
+                env = group['ENV'][:-1]  # strip the trailing ':'
+                return os.getenv(env, group['VAL'])
+            return group['VAL']
+        except Exception:
+            # Plain value without a ${...} placeholder
+            return self.config[key]
+
+    def getValueAsInt(self, key: str):
+        try:
+            match = regex.match(self.config[key])
+            group = match.groupdict()
+            if group['ENV'] is not None:
+                env = group['ENV'][:-1]
+                return int(os.getenv(env, group['VAL']))
+            return int(group['VAL'])
+        except Exception:
+            return int(self.config[key])
+
+    def getValueAsBool(self, key: str):
+        try:
+            match = regex.match(self.config[key])
+            group = match.groupdict()
+            if group['ENV'] is not None:
+                env = group['ENV'][:-1]
+                return bool(os.getenv(env, group['VAL']))
+            return bool(group['VAL'])
+        except Exception:
+            return bool(self.config[key])
+    
+def readYaml(path: str = 'application.yml', profile: str = None) -> YamlConfig:
+    conf = {}
+    if os.path.exists(path):
+        with open(path) as fd:
+            conf = yaml.load(fd, Loader=yaml.FullLoader)
+
+    if profile is not None:
+        base, ext = path.rsplit('.', 1)
+        profiledYaml = f'{base}-{profile}.{ext}'
+        if os.path.exists(profiledYaml):
+            with open(profiledYaml) as fd:
+                conf.update(yaml.load(fd, Loader=yaml.FullLoader))
+
+    return YamlConfig(conf)
+
+# Usage example:
+# conf = readYaml()
+# mysqlConf = conf.get('mysql')
+# host = mysqlConf.getValueAsString("host")
+# port = mysqlConf.getValueAsInt("port")
+# username = mysqlConf.getValueAsString("username")
+# password = mysqlConf.getValueAsString("password")
+# mysql_db = mysqlConf.getValueAsString("db")
+# print(host, port, username, password, mysql_db)

+ 11 - 0
psa_spider/application.yml

@@ -0,0 +1,11 @@
+mysql:
+  host: ${MYSQL_HOST:100.64.0.21}
+  port: ${MYSQL_PORT:3306}
+  username: ${MYSQL_USERNAME:crawler}
+  password: ${MYSQL_PASSWORD:Pass2022}
+  db: ${MYSQL_DATABASE:crawler}
+
+fluent:
+  host: ${FLUENT_HOST:192.168.66.152}
+  port: ${FLUENT_PORT:24225}
+  appname: ${FLUENT_APPNAME:psa_spider.log}
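+# Values use ${ENV_VAR:default} placeholders resolved by YamlLoader; for example,
+# exporting MYSQL_HOST=127.0.0.1 before start-up overrides the default host above.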

+ 574 - 0
psa_spider/mysql_pool.py

@@ -0,0 +1,574 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date   : 2025/3/25 14:14
+import re
+import pymysql
+import YamlLoader
+from loguru import logger
+from dbutils.pooled_db import PooledDB
+
+# Load the YAML configuration
+yaml = YamlLoader.readYaml()
+mysqlYaml = yaml.get("mysql")
+sql_host = mysqlYaml.getValueAsString("host")
+sql_port = mysqlYaml.getValueAsInt("port")
+sql_user = mysqlYaml.getValueAsString("username")
+sql_password = mysqlYaml.getValueAsString("password")
+sql_db = mysqlYaml.getValueAsString("db")
+
+
+class MySQLConnectionPool:
+    """
+    MySQL connection pool
+    """
+
+    def __init__(self, mincached=4, maxcached=5, maxconnections=10, log=None):
+        """
+        Initialize the connection pool.
+        :param mincached: minimum number of idle connections created at start-up (0 = create none)
+        :param maxcached: maximum number of idle connections kept in the pool (0 or None = unlimited)
+        :param maxconnections: maximum number of connections allowed (0 or None = unlimited)
+        :param log: custom logger (defaults to loguru's logger)
+        """
+        # Use loguru's logger unless a custom logger is passed in
+        self.log = log or logger
+        self.pool = PooledDB(
+            creator=pymysql,
+            mincached=mincached,
+            maxcached=maxcached,
+            maxconnections=maxconnections,
+            blocking=True,  # block and wait when no connection is available (False would raise instead)
+            host=sql_host,
+            port=sql_port,
+            user=sql_user,
+            password=sql_password,
+            database=sql_db,
+            ping=0  # connection check: 0 = never, 1 = when fetched from the pool, 2 = when a cursor is created, 4 = when a query is executed, 7 = always
+        )
+
+    def _execute(self, query, args=None, commit=False):
+        """
+        Execute a SQL statement.
+        :param query: SQL statement
+        :param args: SQL parameters
+        :param commit: whether to commit the transaction
+        :return: cursor holding the result
+        """
+        try:
+            with self.pool.connection() as conn:
+                with conn.cursor() as cursor:
+                    cursor.execute(query, args)
+                    if commit:
+                        conn.commit()
+                    self.log.debug(f"sql _execute, Query: {query}, Rows: {cursor.rowcount}")
+                    return cursor
+        except Exception as e:
+            # The context managers have already returned the connection to the pool,
+            # which rolls back any uncommitted work, so no explicit rollback is needed here.
+            self.log.exception(f"Error executing query: {e}, Query: {query}, Args: {args}")
+            raise
+
+    def select_one(self, query, args=None):
+        """
+        Run a query and return a single row.
+        :param query: query statement
+        :param args: query parameters
+        :return: one result row
+        """
+        cursor = self._execute(query, args)
+        return cursor.fetchone()
+
+    def select_all(self, query, args=None):
+        """
+        Run a query and return all rows.
+        :param query: query statement
+        :param args: query parameters
+        :return: all result rows
+        """
+        cursor = self._execute(query, args)
+        return cursor.fetchall()
+
+    def insert_one(self, query, args):
+        """
+        Execute a single INSERT statement.
+        :param query: insert statement
+        :param args: insert parameters
+        """
+        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
+        cursor = self._execute(query, args, commit=True)
+        return cursor.lastrowid  # return the auto-generated ID of the inserted row
+
+    def insert_all(self, query, args_list):
+        """
+        Execute a batch INSERT; falls back to per-row inserts on failure.
+        :param query: insert statement
+        :param args_list: list of parameter tuples
+        """
+        conn = None
+        cursor = None
+        try:
+            conn = self.pool.connection()
+            cursor = conn.cursor()
+            cursor.executemany(query, args_list)
+            conn.commit()
+            self.log.debug(f"sql insert_all, SQL: {query}, Rows: {len(args_list)}")
+            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_all 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
+        except Exception as e:
+            conn.rollback()
+            self.log.error(f"Batch insertion failed after 5 attempts. Trying single inserts. Error: {e}")
+            # 如果批量插入失败,则逐条插入
+            rowcount = 0
+            for args in args_list:
+                self.insert_one(query, args)
+                rowcount += 1
+            self.log.debug(f"Batch insertion failed. Inserted {rowcount} rows individually.")
+        finally:
+            if cursor:
+                cursor.close()
+            if conn:
+                conn.close()
+
+    def insert_one_or_dict(self, table=None, data=None, query=None, args=None, commit=True, ignore=False):
+        """
+        Single-row insert (dict mode or raw SQL mode).
+        :param table: table name (required in dict mode)
+        :param data: dict of {column: value}
+        :param query: raw SQL statement (alternative to data)
+        :param args: SQL parameters (required with query)
+        :param commit: whether to auto-commit
+        :param ignore: whether to use INSERT IGNORE
+        :return: ID of the last inserted row
+        """
+        if data is not None:
+            if not isinstance(data, dict):
+                raise ValueError("Data must be a dictionary")
+
+            keys = ', '.join([self._safe_identifier(k) for k in data.keys()])
+            values = ', '.join(['%s'] * len(data))
+
+            # query = f"INSERT INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
+            # 构建 INSERT IGNORE 语句
+            ignore_clause = "IGNORE" if ignore else ""
+            # insert_sql = f"INSERT {ignore_clause} INTO {table} ({columns}) VALUES ({placeholders})"
+            query = f"INSERT {ignore_clause} INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
+            args = tuple(data.values())
+        elif query is None:
+            raise ValueError("Either data or query must be provided")
+
+        try:
+            cursor = self._execute(query, args, commit)
+            self.log.info(f"sql insert_one_or_dict, Table: {table}, Rows: {cursor.rowcount}")
+            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one_or_dict 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
+            return cursor.lastrowid
+        except pymysql.err.IntegrityError as e:
+            if "Duplicate entry" in str(e):
+                self.log.warning(f"插入失败:重复条目,已跳过。错误详情: {e}")
+                # print("插入失败:重复条目", e)
+                return -1  # 返回 -1 表示重复条目被跳过
+            else:
+                self.log.exception(f"数据库完整性错误: {e}")
+                # print("插入失败:完整性错误", e)
+                raise
+        except Exception as e:
+            # self.log.error(f"未知错误: {str(e)}", exc_info=True)
+            self.log.exception(f"未知错误: {e}")  # 记录完整异常信息
+            # print("插入失败:未知错误", e)
+            raise
+
+    def insert_many(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True, ignore=False):
+        """
+        Batch insert (supports a list of dicts or a raw SQL statement).
+        :param table: table name (required in dict mode)
+        :param data_list: list of dicts [{column: value}]
+        :param query: raw SQL statement (alternative to data_list)
+        :param args_list: list of parameter tuples (required with query)
+        :param batch_size: batch size per executemany call
+        :param commit: whether to auto-commit
+        :param ignore: whether to use INSERT IGNORE
+        :return: number of affected rows
+        """
+        if data_list is not None:
+            if not data_list or not isinstance(data_list[0], dict):
+                raise ValueError("Data_list must be a non-empty list of dictionaries")
+
+            keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
+            values = ', '.join(['%s'] * len(data_list[0]))
+
+            # Build the INSERT [IGNORE] statement
+            ignore_clause = "IGNORE" if ignore else ""
+            query = f"INSERT {ignore_clause} INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
+            args_list = [tuple(d.values()) for d in data_list]
+        elif query is None:
+            raise ValueError("Either data_list or query must be provided")
+
+        total = 0
+        for i in range(0, len(args_list), batch_size):
+            batch = args_list[i:i + batch_size]
+            try:
+                with self.pool.connection() as conn:
+                    with conn.cursor() as cursor:
+                        cursor.executemany(query, batch)
+                        if commit:
+                            conn.commit()
+                        total += cursor.rowcount
+            except pymysql.Error as e:
+                if "Duplicate entry" in str(e):
+                    # Re-raise duplicate-key errors so the caller can decide how to handle them
+                    raise
+                else:
+                    self.log.exception(f"数据库错误: {e}")
+                    raise
+        self.log.info(f"sql insert_many, Table: {table}, Total Rows: {total}")
+        return total
+
+    def insert_many_two(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True):
+        """
+        Batch insert (supports a list of dicts or a raw SQL statement).
+        :param table: table name (required in dict mode)
+        :param data_list: list of dicts [{column: value}]
+        :param query: raw SQL statement (alternative to data_list)
+        :param args_list: list of parameter tuples (required with query)
+        :param batch_size: batch size per executemany call
+        :param commit: whether to auto-commit
+        :return: number of affected rows
+        """
+        if data_list is not None:
+            if not data_list or not isinstance(data_list[0], dict):
+                raise ValueError("Data_list must be a non-empty list of dictionaries")
+            keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
+            values = ', '.join(['%s'] * len(data_list[0]))
+            query = f"INSERT INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
+            args_list = [tuple(d.values()) for d in data_list]
+        elif query is None:
+            raise ValueError("Either data_list or query must be provided")
+
+        total = 0
+        for i in range(0, len(args_list), batch_size):
+            batch = args_list[i:i + batch_size]
+            try:
+                with self.pool.connection() as conn:
+                    with conn.cursor() as cursor:
+                        # Debug logging of the SQL and a sample of the args, if needed:
+                        # self.log.debug(f"Batch insert SQL: {query}")
+                        # self.log.debug(f"Sample args: {batch[0] if batch else 'None'}")
+                        cursor.executemany(query, batch)
+                        if commit:
+                            conn.commit()
+                        total += cursor.rowcount
+                        # self.log.debug(f"Batch insert succeeded. Rows: {cursor.rowcount}")
+            except Exception as e:
+                self.log.exception(f"Batch insert failed: {e}")
+                self.log.error(f"Failed SQL: {query}, Args count: {len(batch)}")
+                # Fall back to per-row inserts, logging each individual error
+                rowcount = 0
+                for args in batch:
+                    try:
+                        self.insert_one(query, args)
+                        rowcount += 1
+                    except Exception as e2:
+                        self.log.error(f"Single insert failed: {e2}, Args: {args}")
+                total += rowcount
+                self.log.debug(f"Inserted {rowcount}/{len(batch)} rows individually.")
+        self.log.info(f"sql insert_many, Table: {table}, Total Rows: {total}")
+        return total
+
+    def insert_too_many(self, query, args_list, batch_size=1000):
+        """
+        Batch insert committed in chunks; useful for 100k+ rows. Falls back to per-row inserts on failure.
+        :param query: insert statement
+        :param args_list: list of parameter tuples
+        :param batch_size: number of rows per chunk
+        """
+        for i in range(0, len(args_list), batch_size):
+            batch = args_list[i:i + batch_size]
+            try:
+                with self.pool.connection() as conn:
+                    with conn.cursor() as cursor:
+                        cursor.executemany(query, batch)
+                        conn.commit()
+            except Exception as e:
+                self.log.error(f"insert_too_many error. Trying single insert. Error: {e}")
+                # Fall back to per-row inserts for this batch
+                for args in batch:
+                    self.insert_one(query, args)
+
+    def update_one(self, query, args):
+        """
+        Execute a single UPDATE statement.
+        :param query: update statement
+        :param args: update parameters
+        """
+        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_one 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
+        return self._execute(query, args, commit=True)
+
+    def update_all(self, query, args_list):
+        """
+        Execute a batch UPDATE; falls back to per-row updates on failure.
+        :param query: update statement
+        :param args_list: list of parameter tuples
+        """
+        conn = None
+        cursor = None
+        try:
+            conn = self.pool.connection()
+            cursor = conn.cursor()
+            cursor.executemany(query, args_list)
+            conn.commit()
+            self.log.debug(f"sql update_all, SQL: {query}, Rows: {len(args_list)}")
+            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_all 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
+        except Exception as e:
+            conn.rollback()
+            self.log.error(f"Error executing query: {e}")
+            # Fall back to per-row updates when the batch update fails
+            rowcount = 0
+            for args in args_list:
+                self.update_one(query, args)
+                rowcount += 1
+            self.log.debug(f'Batch update failed. Updated {rowcount} rows individually.')
+        finally:
+            if cursor:
+                cursor.close()
+            if conn:
+                conn.close()
+
+    def update_one_or_dict(self, table=None, data=None, condition=None, query=None, args=None, commit=True):
+        """
+        Single-row update (dict mode or raw SQL mode).
+        :param table: table name (required in dict mode)
+        :param data: dict of {column: value} (alternative to query)
+        :param condition: update condition, in one of the following forms:
+            - dict: {"id": 1} → "WHERE id = %s"
+            - str: "id = 1" → "WHERE id = 1" (caller is responsible for safety)
+            - tuple: ("id = %s", [1]) → "WHERE id = %s" (parameterized)
+        :param query: raw SQL statement (alternative to data)
+        :param args: SQL parameters (required with query)
+        :param commit: whether to auto-commit
+        :return: number of affected rows
+        :raises ValueError: if parameter validation fails
+        """
+        # Parameter validation
+        if data is not None:
+            if not isinstance(data, dict):
+                raise ValueError("Data must be a dictionary")
+            if table is None:
+                raise ValueError("Table name is required for dictionary update")
+            if condition is None:
+                raise ValueError("Condition is required for dictionary update")
+
+            # Build the SET clause
+            set_clause = ", ".join([f"{self._safe_identifier(k)} = %s" for k in data.keys()])
+            set_values = list(data.values())
+
+            # Parse the condition
+            condition_clause, condition_args = self._parse_condition(condition)
+            query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
+            args = set_values + condition_args
+
+        elif query is None:
+            raise ValueError("Either data or query must be provided")
+
+        # Execute the update
+        cursor = self._execute(query, args, commit)
+        # self.log.debug(
+        #     f"Updated table={table}, rows={cursor.rowcount}, query={query[:100]}...",
+        #     extra={"table": table, "rows": cursor.rowcount}
+        # )
+        return cursor.rowcount
+
+    def _parse_condition(self, condition):
+        """
+        Parse a condition into (clause, args) form.
+        :param condition: dict / str / tuple
+        :return: (str, list) SQL clause and parameter list
+        """
+        if isinstance(condition, dict):
+            clause = " AND ".join([f"{self._safe_identifier(k)} = %s" for k in condition.keys()])
+            args = list(condition.values())
+        elif isinstance(condition, str):
+            clause = condition  # note: the caller is responsible for keeping this safe
+            args = []
+        elif isinstance(condition, (tuple, list)) and len(condition) == 2:
+            clause, args = condition[0], condition[1]
+            if not isinstance(args, (list, tuple)):
+                args = [args]
+        else:
+            raise ValueError("Condition must be dict/str/(clause, args)")
+        return clause, args
+
+    def update_many(self, table=None, data_list=None, condition_list=None, query=None, args_list=None, batch_size=500,
+                    commit=True):
+        """
+        Batch update (supports a list of dicts or a raw SQL statement).
+        :param table: table name (required in dict mode)
+        :param data_list: list of dicts [{column: value}]
+        :param condition_list: list of condition dicts (must match the length of data_list)
+        :param query: raw SQL statement (alternative to data_list)
+        :param args_list: list of parameter tuples (required with query)
+        :param batch_size: batch size per executemany call
+        :param commit: whether to auto-commit
+        :return: number of affected rows
+        """
+        if data_list is not None:
+            if not data_list or not isinstance(data_list[0], dict):
+                raise ValueError("Data_list must be a non-empty list of dictionaries")
+            if condition_list is None or len(data_list) != len(condition_list):
+                raise ValueError("Condition_list must be provided and match the length of data_list")
+            if not all(isinstance(cond, dict) for cond in condition_list):
+                raise ValueError("All elements in condition_list must be dictionaries")
+
+            # Keys of the first data item and the first condition item
+            first_data_keys = set(data_list[0].keys())
+            first_cond_keys = set(condition_list[0].keys())
+
+            # Build the base SQL statement
+            set_clause = ', '.join([self._safe_identifier(k) + ' = %s' for k in data_list[0].keys()])
+            condition_clause = ' AND '.join([self._safe_identifier(k) + ' = %s' for k in condition_list[0].keys()])
+            base_query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
+            total = 0
+
+            # Process in batches
+            for i in range(0, len(data_list), batch_size):
+                batch_data = data_list[i:i + batch_size]
+                batch_conds = condition_list[i:i + batch_size]
+                batch_args = []
+
+                # Check that every row in this batch has the same structure
+                can_batch = True
+                for data, cond in zip(batch_data, batch_conds):
+                    data_keys = set(data.keys())
+                    cond_keys = set(cond.keys())
+                    if data_keys != first_data_keys or cond_keys != first_cond_keys:
+                        can_batch = False
+                        break
+                    batch_args.append(tuple(data.values()) + tuple(cond.values()))
+
+                if not can_batch:
+                    # Structures differ; fall back to per-row updates
+                    for data, cond in zip(batch_data, batch_conds):
+                        self.update_one_or_dict(table=table, data=data, condition=cond, commit=commit)
+                        total += 1
+                    continue
+
+                # Execute the batch update
+                try:
+                    with self.pool.connection() as conn:
+                        with conn.cursor() as cursor:
+                            cursor.executemany(base_query, batch_args)
+                            if commit:
+                                conn.commit()
+                            total += cursor.rowcount
+                            self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
+                except Exception as e:
+                    self.log.error(f"Batch update failed: {e}")
+                    # Fall back to per-row updates
+                    for args, data, cond in zip(batch_args, batch_data, batch_conds):
+                        try:
+                            self._execute(base_query, args, commit=commit)
+                            total += 1
+                        except Exception as e2:
+                            self.log.error(f"Single update failed: {e2}, Data: {data}, Condition: {cond}")
+            self.log.info(f"Total updated rows: {total}")
+            return total
+        elif query is not None:
+            # Raw SQL statement with a list of parameter tuples
+            if args_list is None:
+                raise ValueError("args_list must be provided when using query")
+
+            total = 0
+            for i in range(0, len(args_list), batch_size):
+                batch_args = args_list[i:i + batch_size]
+                try:
+                    with self.pool.connection() as conn:
+                        with conn.cursor() as cursor:
+                            cursor.executemany(query, batch_args)
+                            if commit:
+                                conn.commit()
+                            total += cursor.rowcount
+                            self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
+                except Exception as e:
+                    self.log.error(f"Batch update failed: {e}")
+                    # Fall back to per-row updates
+                    for args in batch_args:
+                        try:
+                            self._execute(query, args, commit=commit)
+                            total += 1
+                        except Exception as e2:
+                            self.log.error(f"Single update failed: {e2}, Args: {args}")
+            self.log.info(f"Total updated rows: {total}")
+            return total
+        else:
+            raise ValueError("Either data_list or query must be provided")
+
+    def check_pool_health(self):
+        """
+        Check that the pool can hand out a working connection.
+
+        # Usage example
+        # configure the MySQL connection pool
+        sql_pool = MySQLConnectionPool(log=log)
+        if not sql_pool.check_pool_health():
+            log.error("数据库连接池异常")
+            raise RuntimeError("数据库连接池异常")
+        """
+        try:
+            with self.pool.connection() as conn:
+                conn.ping(reconnect=True)
+                return True
+        except Exception as e:
+            self.log.error(f"Connection pool health check failed: {e}")
+            return False
+
+    @staticmethod
+    def _safe_identifier(name):
+        """SQL标识符安全校验"""
+        if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):
+            raise ValueError(f"Invalid SQL identifier: {name}")
+        return name
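+        # e.g. _safe_identifier("card_id") -> "card_id";
+        #      _safe_identifier("id; DROP TABLE x") raises ValueError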
+
+
+if __name__ == '__main__':
+    sql_pool = MySQLConnectionPool()
+    data_dic = {'card_type_id': 111, 'card_type_name': '补充包 继承的意志【OPC-13】', 'card_type_position': 964,
+                'card_id': 5284, 'card_name': '蒙奇·D·路飞', 'card_number': 'OP13-001', 'card_rarity': 'L',
+                'card_img': 'https://source.windoent.com/OnePiecePc/Picture/1757929283612OP13-001.png',
+                'card_life': '4', 'card_attribute': '打', 'card_power': '5000', 'card_attack': '-',
+                'card_color': '红/绿', 'subscript': 4, 'card_features': '超新星/草帽一伙',
+                'card_text_desc': '【咚!!×1】【对方的攻击时】我方处于活跃状态的咚!!不多于5张的场合,可以将我方任意张数的咚!!转为休息状态。每有1张转为休息状态的咚!!,本次战斗中,此领袖或我方最多1张拥有《草帽一伙》特征的角色力量+2000。',
+                'card_offer_type': '补充包 继承的意志【OPC-13】', 'crawler_language': '简中'}
+    sql_pool.insert_one_or_dict(table="one_piece_record", data=data_dic)

+ 477 - 0
psa_spider/psa_pop_detail_spider.py

@@ -0,0 +1,477 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date   : 2025/9/15 14:53
+import inspect
+import random
+import time
+import schedule
+import user_agent
+from curl_cffi import requests
+from loguru import logger
+from parsel import Selector
+from urllib.parse import urljoin
+from mysql_pool import MySQLConnectionPool
+from DrissionPage import ChromiumPage, ChromiumOptions
+from tenacity import retry, stop_after_attempt, wait_fixed
+
+logger.remove()
+logger.add("logs/pop_player_{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
+           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
+           level="DEBUG", retention="3 day")
+
+client_identifier_list = [
+    "edge99", "edge101", "chrome99", "chrome100", "chrome101", "chrome104", "chrome107",
+    "chrome110", "chrome116", "chrome119", "chrome120", "chrome123", "chrome124",
+    "chrome99_android", "safari15_3", "safari15_5", "safari17_0", "safari17_2_ios"
+]
+
+BASE_URL = 'https://www.psacard.com'
+
+headers = {
+    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+    'User-Agent': user_agent.generate_user_agent()
+}
+
+category_link_list = {
+    'Baseball Cards': 'https://www.psacard.com/pop/baseball-cards/20003',
+    'Baseball Coins': 'https://www.psacard.com/pop/baseball-coins/82797',
+    'Basketball Cards': 'https://www.psacard.com/pop/basketball-cards/20019',
+    'Basketball Coins': 'https://www.psacard.com/pop/basketball-coins/83007',
+    'Boxing / Wrestling Cards / MMA': 'https://www.psacard.com/pop/boxing-wrestling-cards-mma/20021',
+    'Football Cards': 'https://www.psacard.com/pop/football-cards/20014',
+    'Football Coins': 'https://www.psacard.com/pop/football-coins/83011',
+    'Golf Cards': 'https://www.psacard.com/pop/golf-cards/20023',
+    'Hockey Cards': 'https://www.psacard.com/pop/hockey-cards/20020',
+    'Hockey Coins': 'https://www.psacard.com/pop/hockey-coins/83012',
+    'Minor League Cards': 'https://www.psacard.com/pop/minor-league-cards/20031',
+    'Misc Cards': 'https://www.psacard.com/pop/misc-cards/20033',
+    'Multi-Sport Cards': 'https://www.psacard.com/pop/multi-sport-cards/20006',
+    'Multi-Sport Coins': 'https://www.psacard.com/pop/multi-sport-coins/102825',
+    'Non-Sport Cards': 'https://www.psacard.com/pop/non-sport-cards/20032',
+    'Non-Sport Coins': 'https://www.psacard.com/pop/non-sport-coins/82981',
+    'Packs': 'https://www.psacard.com/pop/packs/20017',
+    'Pins': 'https://www.psacard.com/pop/pins/20013',
+    'Soccer Cards': 'https://www.psacard.com/pop/soccer-cards/20004',
+    'TCG Cards': 'https://www.psacard.com/pop/tcg-cards/156940',
+    'Tickets': 'https://www.psacard.com/pop/tickets/20022',
+    # 'Game-Used Bats': 'https://www.psacard.com/pop/bats'
+}
+
+
+def after_log(retry_state):
+    """
+    Tenacity retry callback that logs each attempt.
+    :param retry_state: RetryCallState object
+    """
+    # Use the logger passed as the first positional argument, if any
+    if retry_state.args and len(retry_state.args) > 0:
+        log = retry_state.args[0]
+    else:
+        log = logger  # fall back to the global logger
+
+    if retry_state.outcome.failed:
+        log.warning(
+            f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
+    else:
+        log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(2), after=after_log)
+def get_proxys(log):
+    # Purchased proxy account, North America
+    # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
+    https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
+
+    try:
+        proxySettings = {
+            "http": http_proxy,
+            "https": https_proxy,
+        }
+        return proxySettings
+    except Exception as e:
+        log.error(f"Error getting proxy: {e}")
+        raise e
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_sets_data(log, category, category_id, category_link, tag_year, tag_year_link, sql_pool):
+    # Proxy tunnel host:port
+    # tunnel = "x371.kdltps.com:15818"
+    tunnel = "proxy.123proxy.cn:36927"
+    options = ChromiumOptions()
+    # options.set_paths(local_port=9137, user_data_path=r'D:\Drissionpage_temp\topps1_port_9137')
+    # options = ChromiumOptions()
+    options.set_proxy("http://" + tunnel)
+    options.auto_port(True)
+    # options.headless(True)
+    options.set_argument("--disable-gpu")
+    options.set_argument("-accept-lang=en-US")
+    options.set_argument('--start-maximized')
+    tab = ChromiumPage(options)
+    try:
+        # tab_url = f"https://www.psacard.com/pop/baseball-cards/2024/260045"
+        tab_url = tag_year_link
+        tab.get(tab_url)
+        log.debug(f'{inspect.currentframe().f_code.co_name} -> 页面加载成功, url: {tab_url}')
+
+        # Page through the table until the last page
+        page = 1
+        while True:
+            log.debug(f'{inspect.currentframe().f_code.co_name} -> 当前页码: {page}')
+            html = tab.html
+            if not html:
+                log.error(f'{inspect.currentframe().f_code.co_name} -> 页面加载失败...........')
+                raise Exception('页面加载失败, 重新加载........')
+
+            selector = Selector(text=html)
+            tag_tr_list = selector.xpath('//table[@id="tableSets"]/tbody/tr[position() > 1]')
+
+            info_list = []
+            for tag_tr in tag_tr_list:
+                set_name = tag_tr.xpath('./td[@class="text-left"]/a[1]/text()').get()
+                set_name_url = tag_tr.xpath('./td[@class="text-left"]/a[1]/@href').get()
+                set_name_url = urljoin(BASE_URL, set_name_url) if set_name_url else None
+                set_id = set_name_url.split('/')[-1] if set_name_url else None
+                # print(set_name, set_name_url)
+                data_dict = {
+                    'category': category,
+                    'category_id': category_id,
+                    'category_link': category_link,
+                    'year': tag_year,
+                    'year_link': tag_year_link,
+                    'set_name': set_name,
+                    'set_link': set_name_url,
+                    'set_id': set_id
+                }
+                info_list.append(data_dict)
+                # try:
+                #     get_player_list(log, data_dict, sql_pool)
+                # except Exception as e:
+                #     log.error(f'get_player_list error: {e}')
+
+            # Persist this page of set records
+            if info_list:
+                sql_pool.insert_many(table='psa_pop_player_sets', data_list=info_list, ignore=True)
+
+            # Check whether there is a next page
+            next_button = tab.ele('#tableSets_next')
+            if next_button and 'disabled' not in next_button.attr('class'):
+                # Click the next-page button
+                next_button.click()
+                log.debug(f'{inspect.currentframe().f_code.co_name} -> 点击下一页按钮')
+                # tab.wait.load_start()  # wait for the page to load
+                # time.sleep(2)  # wait for the page to load
+            else:
+                # No more pages; exit the loop
+                log.debug(f'{inspect.currentframe().f_code.co_name} -> 没有下一页了, 退出循环, 最后页码: {page}')
+                break
+
+            page += 1
+
+    except Exception as e:
+        log.error(f'get_sets_data error: {e}')
+        raise
+    finally:
+        tab.quit()
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_years_data(log, category, category_id, category_link, sql_pool):
+    """
+    Fetch the year listing for a category and crawl the sets for each year.
+    :param log: logger
+    :param category: category name
+    :param category_id: category id
+    :param category_link: category URL
+    :param sql_pool: MySQL connection pool
+    """
+    try:
+        with requests.Session() as session:
+            resp = session.get(category_link, impersonate=random.choice(client_identifier_list), headers=headers,
+                               proxies=get_proxys(log), timeout=22, allow_redirects=False)
+            # log.debug(resp.text)
+
+        if 'Just a moment' in resp.text:
+            log.debug('Just a moment , retrying.....')
+            raise Exception('Just a moment')
+
+        resp_selector = Selector(text=resp.text)
+
+        tag_tr_list = resp_selector.xpath('//table[@id="tableCategory"]/tbody/tr')
+        for tag_tr in tag_tr_list:
+            tag_year = tag_tr.xpath('./td[1]/a/text()').get()
+            tag_year_link = tag_tr.xpath('./td[1]/a/@href').get()
+            tag_year_link = BASE_URL + tag_year_link if tag_year_link else tag_year_link
+
+            try:
+                get_sets_data(log, category, category_id, category_link, tag_year, tag_year_link, sql_pool)
+            except Exception as e1:
+                log.error(f"Error getting sets data: {e1}")
+
+    except Exception as e:
+        log.error(f"Error getting detail data: {e}")
+        raise e
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(3), after=after_log)
+def get_player_single_page(log, category_id, set_id, start, length, draw):
+    """
+    Fetch a single page of player data.
+    :param log: logger
+    :param category_id: category ID
+    :param set_id: set ID
+    :param start: start offset
+    :param length: page size
+    :param draw: request sequence number
+    :return: response data dict or None
+    """
+    player_headers = {
+        "accept": "application/json, text/javascript, */*; q=0.01",
+        "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
+        "referer": f"https://www.psacard.com/pop/baseball-cards/2024/bowman/{set_id}",
+        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
+    }
+
+    url = "https://www.psacard.com/Pop/GetSetItems"
+    data = {
+        "draw": str(draw),
+        "start": str(start),
+        "length": str(length),
+        "search": "",
+        "headingID": str(set_id),
+        "categoryID": str(category_id),
+        "isPSADNA": "false"
+    }
+
+    try:
+        response = requests.post(
+            url,
+            impersonate=random.choice(client_identifier_list),
+            headers=player_headers,
+            data=data,
+            timeout=22,
+            proxies=get_proxys(log)
+        )
+
+        if response.status_code == 200:
+            # print(response.json())
+            return response.json()
+        elif response.status_code == 403:
+            log.error(f"请求被拒绝,请检查IP地址是否被封禁, set_id: {set_id}")
+            # return None
+            raise Exception('请求被拒绝')
+        else:
+            log.error(f"请求失败,状态码: {response.status_code}, set_id: {set_id}")
+            # return None
+            raise Exception('请求失败')
+
+    except Exception as e:
+        log.error(f"获取单页球员数据出错, set_id: {set_id}, start: {start}, error: {e}")
+        # return None
+        raise Exception('获取单页球员数据出错')
+
+
+def get_player_list(log, category_id, set_id, sql_pool):
+    """
+    Fetch the player list for a set, paging through all results.
+    :param log: logger
+    :param category_id: category_id
+    :param set_id: set_id
+    :param sql_pool: MySQL connection pool
+    """
+    start = 0  # start offset
+    length = 300  # page size
+    draw = 1  # request sequence number
+
+    while True:
+        log.debug(f"正在获取球员数据, category_id:{category_id}, set_id: {set_id}, page: {draw}, start: {start}")
+
+        # Fetch one page of data
+        try:
+            response_data = get_player_single_page(log, category_id, set_id, start, length, draw)
+        except Exception as e:
+            log.error(f"获取单页球员数据出错, category_id:{category_id}, set_id: {set_id}, start: {start}, error: {e}")
+            response_data = None
+
+        if response_data is None:
+            log.error(f"获取球员数据失败, category_id:{category_id}, set_id: {set_id}, page: {draw}, break !!!")
+            # sql_pool.update_one_or_dict(
+            #     table='psa_pop_player_sets',
+            #     data={'player_state': 2},
+            #     condition={'set_id': set_id}
+            # )
+            break
+
+        # Check whether any rows were returned
+        player_data_list = response_data.get('data') or []
+
+        # On the first page, drop the first row
+        if draw == 1 and player_data_list:
+            player_data_list.pop(0)
+
+        if len(player_data_list) > 0:
+            log.debug(
+                f"获取到 {len(player_data_list)} 条球员数据, category_id:{category_id}, set_id: {set_id}, start: {start}")
+
+            info_list = []
+            for pl_data in player_data_list:
+                spec_id = pl_data.get('SpecID')
+                card_number = pl_data.get('CardNumber')
+                subject_name = pl_data.get('SubjectName')
+                card_set = pl_data.get('Variety')
+                grade_total = pl_data.get('GradeTotal')
+
+                data_dict = {
+                    'category_id': category_id,
+                    'set_id': set_id,
+                    'spec_id': spec_id,
+                    'card_number': card_number,
+                    'subject_name': subject_name,
+                    'card_set': card_set,
+                    'grade_total': grade_total
+                }
+                # print(f'data_dict:{data_dict}')
+                info_list.append(data_dict)
+
+            # Persist the data to the database
+            if info_list:
+                sql_pool.insert_many(table='psa_pop_player_record', data_list=info_list, ignore=True)
+
+            # If fewer rows than requested came back, this was the last page
+            if len(player_data_list) < length:
+                log.debug(f"已到达最后一页, set_id: {set_id}")
+                break
+
+            # Advance the paging parameters
+            start += length
+            draw += 1
+
+            # Small delay to avoid hammering the endpoint
+            time.sleep(1)
+        else:
+            log.debug(f"没有更多数据, set_id: {set_id}")
+            break
+
+
+@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
+def pop_main(log):
+    """
+    Main entry point: crawl categories, then years, then set listings.
+    """
+    log.info(
+        f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
+
+    # Configure the MySQL connection pool
+    sql_pool = MySQLConnectionPool(log=log)
+    if not sql_pool.check_pool_health():
+        log.error("数据库连接池异常")
+        raise RuntimeError("数据库连接池异常")
+
+    try:
+        log.debug(".......... 开始获取数据报告 ..........")
+        for category, category_link in category_link_list.items():
+            log.debug(f"{category}第一次查询, 开始获取数据.......")
+            try:
+                category_id = category_link.split('/')[-1]
+                get_years_data(log, category, category_id, category_link, sql_pool)
+            except Exception as e1:
+                log.error(f"Error getting detail data: {e1}")
+    except Exception as e:
+        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
+    finally:
+        log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
+
+
+@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
+def player_main(log):
+    """
+    Main entry point: crawl player records for every stored set.
+    """
+    log.info(
+        f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
+
+    # Configure the MySQL connection pool
+    sql_pool = MySQLConnectionPool(log=log)
+    if not sql_pool.check_pool_health():
+        log.error("数据库连接池异常")
+        raise RuntimeError("数据库连接池异常")
+
+    try:
+        log.debug(".......... 开始获取数据报告 ..........")
+        sql_sets_list = sql_pool.select_all(
+            # "select category_id, set_id from psa_pop_player_sets where player_state = 0"
+            "select category_id, set_id from psa_pop_player_sets")
+        for category_set in sql_sets_list:
+            category_id, set_id = category_set
+            try:
+                log.debug(f"category_id:{category_id}第一次查询, 开始获取数据.......")
+                get_player_list(log, category_id, set_id, sql_pool)
+                # sql_pool.update_one_or_dict(
+                #     table='psa_pop_player_sets',
+                #     data={'player_state': 1},
+                #     condition={'set_id': set_id}
+                # )
+
+            except Exception as e1:
+                log.error(f"Error getting detail data: {e1}")
+                # sql_pool.update_one_or_dict(
+                #     table='psa_pop_player_sets',
+                #     data={'player_state': 2},
+                #     condition={'set_id': set_id}
+                # )
+    except Exception as e:
+        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
+    finally:
+        log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
+
+
+def schedule_task():
+    """
+    Scheduler entry point for the two crawl tasks:
+    pop_main
+    player_main
+    """
+    # Run the tasks once immediately (optional)
+    # pop_main(log=logger)
+
+    # player_main(log=logger)
+
+    # Schedule the recurring tasks
+    schedule.every().saturday.at("08:00").do(pop_main, log=logger)
+
+    schedule.every().wednesday.at("08:00").do(player_main, log=logger)
+
+    # schedule.every().day.at("00:30").do(player_main, log=logger)
+
+    while True:
+        schedule.run_pending()
+        time.sleep(1)
+
+
+if __name__ == '__main__':
+    # get_detail_data(logger, '','https://www.psacard.com/pop/bats',None)
+    # get_sets_data(logger)
+    # get_player_single_page(logger, '21172', '279481', 0, 300, 1)
+
+    # aa_dict = {
+    #     'category': 'baseball',
+    #     'category_id': '20003',
+    #     'category_link': 'https://www.psacard.com/pop/bats',
+    #     'tag_year': '2004',
+    #     'tag_year_link': '',
+    #     'set_name': '',
+    #     'set_name_url': '',
+    #     'set_id': '279664'
+    # }
+    # get_player_list(logger, aa_dict, None)
+
+    # pop_main(log=logger)
+    player_main(log=logger)
+
+    # schedule_task()

+ 234 - 0
psa_spider/psa_pop_spider.py

@@ -0,0 +1,234 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date   : 2025/9/15 14:53
+import inspect
+import random
+import time
+import schedule
+import user_agent
+from curl_cffi import requests
+from loguru import logger
+from parsel import Selector
+from mysql_pool import MySQLConnectionPool
+from tenacity import retry, stop_after_attempt, wait_fixed
+
+logger.remove()
+logger.add("logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
+           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
+           level="DEBUG", retention="3 day")
+
+client_identifier_list = [
+    "edge99", "edge101", "chrome99", "chrome100", "chrome101", "chrome104", "chrome107",
+    "chrome110", "chrome116", "chrome119", "chrome120", "chrome123", "chrome124",
+    "chrome99_android", "safari15_3", "safari15_5", "safari17_0", "safari17_2_ios"
+]
+
+BASE_URL = 'https://www.psacard.com'
+
+headers = {
+    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+    'User-Agent': user_agent.generate_user_agent()
+}
+
+category_link_list = {'Baseball Cards': 'https://www.psacard.com/pop/baseball-cards/20003',
+                      'Baseball Coins': 'https://www.psacard.com/pop/baseball-coins/82797',
+                      'Basketball Cards': 'https://www.psacard.com/pop/basketball-cards/20019',
+                      'Basketball Coins': 'https://www.psacard.com/pop/basketball-coins/83007',
+                      'Boxing / Wrestling Cards / MMA': 'https://www.psacard.com/pop/boxing-wrestling-cards-mma/20021',
+                      'Football Cards': 'https://www.psacard.com/pop/football-cards/20014',
+                      'Football Coins': 'https://www.psacard.com/pop/football-coins/83011',
+                      'Golf Cards': 'https://www.psacard.com/pop/golf-cards/20023',
+                      'Hockey Cards': 'https://www.psacard.com/pop/hockey-cards/20020',
+                      'Hockey Coins': 'https://www.psacard.com/pop/hockey-coins/83012',
+                      'Minor League Cards': 'https://www.psacard.com/pop/minor-league-cards/20031',
+                      'Misc Cards': 'https://www.psacard.com/pop/misc-cards/20033',
+                      'Multi-Sport Cards': 'https://www.psacard.com/pop/multi-sport-cards/20006',
+                      'Multi-Sport Coins': 'https://www.psacard.com/pop/multi-sport-coins/102825',
+                      'Non-Sport Cards': 'https://www.psacard.com/pop/non-sport-cards/20032',
+                      'Non-Sport Coins': 'https://www.psacard.com/pop/non-sport-coins/82981',
+                      'Packs': 'https://www.psacard.com/pop/packs/20017',
+                      'Pins': 'https://www.psacard.com/pop/pins/20013',
+                      'Soccer Cards': 'https://www.psacard.com/pop/soccer-cards/20004',
+                      'TCG Cards': 'https://www.psacard.com/pop/tcg-cards/156940',
+                      'Tickets': 'https://www.psacard.com/pop/tickets/20022',
+                      'Game-Used Bats': 'https://www.psacard.com/pop/bats'}
+
+
+def after_log(retry_state):
+    """
+    Tenacity retry callback that logs each attempt.
+    :param retry_state: RetryCallState object
+    """
+    # Use the logger passed as the first positional argument, if any
+    if retry_state.args and len(retry_state.args) > 0:
+        log = retry_state.args[0]
+    else:
+        log = logger  # fall back to the global logger
+
+    if retry_state.outcome.failed:
+        log.warning(
+            f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
+    else:
+        log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(2), after=after_log)
+def get_proxys(log):
+    # Purchased proxy account, North America
+    # http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    # https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36927"
+    http_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
+    https_proxy = "http://u1952150085001297:sJMHl4qc4bM0@proxy.123proxy.cn:36931"
+
+    try:
+        proxySettings = {
+            "http": http_proxy,
+            "https": https_proxy,
+        }
+        return proxySettings
+    except Exception as e:
+        log.error(f"Error getting proxy: {e}")
+        raise e
+
+
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_detail_data(log, category, link, sql_pool):
+    """
+    Fetch the total-graded summary for one category page.
+    :param log: logger
+    :param category: category name
+    :param link: category URL
+    :param sql_pool: MySQL connection pool
+    """
+    try:
+        with requests.Session() as session:
+            resp = session.get(link, impersonate=random.choice(client_identifier_list), headers=headers,
+                               proxies=get_proxys(log), timeout=22, allow_redirects=False)
+            # log.debug(resp.text)
+
+        if 'Just a moment' in resp.text:
+            log.debug('Just a moment , retrying.....')
+            raise Exception('Just a moment')
+
+        resp_selector = Selector(text=resp.text)
+        tag_td_list = resp_selector.xpath(
+            '//*[@id="tableCategory"]/thead/tr/td[@class="text-right"]/text() | //*[@id="tableBats"]/thead/tr/td[@class="text-right"]/text()').getall()
+
+        if tag_td_list:
+            # number_of_sets = tag_td_list[0]
+            # total_items = tag_td_list[1]
+            total_graded = tag_td_list[-1]
+            if total_graded:
+                total_graded = total_graded.replace(',', '')
+            log.debug(f"Total Graded: {total_graded}")
+            data_dict = {
+                "category": category,
+                "category_link": link,
+                "total_graded": total_graded,
+                "crawl_date": time.strftime("%Y-%m-%d", time.localtime())
+            }
+            # print(data_dict)
+            try:
+                sql_pool.insert_one_or_dict(table="psa_pop_record", data=data_dict, ignore=True)
+            except Exception as e1:
+                log.error(f"Error inserting data: {e1}")
+
+    except Exception as e:
+        log.error(f"Error getting detail data: {e}")
+        raise e
+
+
+def get_pop_data(log, sql_pool):
+    """
+    Fetch the POP landing page and crawl each category found there.
+    :param log: logger
+    :param sql_pool: MySQL connection pool
+    """
+    url = "https://www.psacard.com/pop"
+    try:
+        with requests.Session() as session:
+            resp = session.get(url, impersonate=random.choice(client_identifier_list), headers=headers,
+                               proxies=get_proxys(log), timeout=22, allow_redirects=False)
+            # log.debug(resp.text)
+
+        if 'Just a moment' in resp.text:
+            log.debug('Just a moment , retrying.....')
+            raise Exception('Just a moment')
+
+        resp_selector = Selector(text=resp.text)
+
+        tag_a_list = resp_selector.xpath('//*[@id="mainContent"]/div[2]/div/a')
+        for tag_a in tag_a_list:
+            category = tag_a.xpath('./div/div/text()').get()
+            category_link = tag_a.xpath('./@href').get()
+            if 'https://' not in category_link:
+                category_link = BASE_URL + category_link
+
+            log.debug(f"Category: {category}, Link: {category_link}")
+
+            try:
+                get_detail_data(log, category, category_link, sql_pool)
+            except Exception as e1:
+                log.error(f"Error getting detail data: {e1}")
+
+    except Exception as e:
+        log.error(f"Error getting pop data: {e}")
+        raise e
+
+
+@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
+def pop_main(log):
+    """
+    主函数
+    """
+    log.info(
+        f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
+
+    # Configure the MySQL connection pool
+    sql_pool = MySQLConnectionPool(log=log)
+    if not sql_pool.check_pool_health():
+        log.error("数据库连接池异常")
+        raise RuntimeError("数据库连接池异常")
+
+    try:
+        log.debug(".......... 开始获取数据报告 ..........")
+        # get_pop_data(log, sql_pool)
+        for _ in range(2):
+            for category, category_link in category_link_list.items():
+                log.debug(f"{category}第一次查询, 开始获取数据.......")
+                try:
+                    get_detail_data(log, category, category_link, sql_pool)
+                except Exception as e1:
+                    log.error(f"Error getting detail data: {e1}")
+
+            time.sleep(5)
+
+    except Exception as e:
+        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
+    finally:
+        log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
+
+
+def schedule_task():
+    """
+    Scheduler entry point for the crawl task:
+    pop_main
+    """
+    # Run the task once immediately (optional)
+    # pop_main(log=logger)
+
+    # Schedule the recurring task
+    schedule.every().day.at("08:00").do(pop_main, log=logger)
+
+    while True:
+        schedule.run_pending()
+        time.sleep(1)
+
+
+if __name__ == '__main__':
+    # get_pop_data(logger)
+    schedule_task()
+    # get_detail_data(logger, '','https://www.psacard.com/pop/bats',None)

+ 12 - 0
psa_spider/requirements.txt

@@ -0,0 +1,12 @@
+-i https://mirrors.aliyun.com/pypi/simple/
+curl_cffi==0.7.4
+DBUtils==3.1.0
+DrissionPage  # required by psa_pop_detail_spider.py; pin to the version used in development
+loguru==0.7.3
+parsel==1.10.0
+PyMySQL==1.1.1
+PyYAML==6.0.2
+retrying==1.3.4
+schedule==1.2.2
+tenacity==9.0.0
+tls-client==1.0.1
+user_agent