Kaynağa Gözat

update 6.13.1

lei.chen 6 ay önce
ebeveyn
işleme
cbad105c01

+ 74 - 0
kawan_spider/YamlLoader.py

@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+#
+import os, re
+import yaml
+
# Matches Spring-style placeholders: "${ENV_VAR:default}" or "${value}".
# The optional ENV group keeps its trailing ':' (callers strip it before
# the os.getenv lookup).
regex = re.compile(r'^\$\{(?P<ENV>[A-Z_\-]+\:)?(?P<VAL>[\w\.]+)\}$')
+
class YamlConfig:
    """Thin wrapper around a parsed YAML mapping with Spring-style
    ``${ENV_VAR:default}`` placeholder resolution.

    Fixes over the original:
      * ``getValueAsBool`` used ``bool()`` on the resolved string, which is
        True for ANY non-empty string (including ``"false"``); strings are
        now parsed against known truthy tokens.
      * bare ``except:`` clauses narrowed to the exceptions the lookup can
        actually raise, and ``!= None`` replaced with ``is not None``.
    """

    # Matches "${ENV_VAR:default}" or "${value}"; the ENV group keeps its
    # trailing ':' which is stripped before the os.getenv lookup.
    _PATTERN = re.compile(r'^\$\{(?P<ENV>[A-Z_\-]+\:)?(?P<VAL>[\w\.]+)\}$')

    # String tokens treated as True by getValueAsBool (case-insensitive).
    _TRUE_STRINGS = frozenset({'1', 'true', 'yes', 'on'})

    def __init__(self, config):
        # config: the mapping (or scalar) produced by YAML parsing.
        self.config = config

    def get(self, key: str) -> 'YamlConfig':
        """Return the sub-section under *key* wrapped in a YamlConfig.

        A missing key yields ``YamlConfig(None)`` (original behaviour —
        the error surfaces later, on value access).
        """
        return YamlConfig(self.config.get(key))

    def _resolve(self, key: str):
        """Resolve ``self.config[key]`` through the placeholder pattern.

        Returns the environment variable's value (falling back to the
        placeholder's default) when the raw value looks like
        ``${ENV:default}``; returns None when the placeholder has no ENV
        part (kept for backward compatibility).  Raises KeyError /
        TypeError / AttributeError when the key is missing, the value is
        not a string, or the pattern does not match — callers then fall
        back to the raw value.
        """
        match = self._PATTERN.match(self.config[key])
        group = match.groupdict()
        if group['ENV'] is not None:
            env = group['ENV'][:-1]  # drop the trailing ':'
            return os.getenv(env, group['VAL'])
        return None

    @staticmethod
    def _to_bool(value) -> bool:
        """Parse *value* as a boolean; strings compare against known
        truthy tokens, everything else uses Python truthiness."""
        if isinstance(value, str):
            return value.strip().lower() in YamlConfig._TRUE_STRINGS
        return bool(value)

    def getValueAsString(self, key: str):
        """Return the (placeholder-resolved) value for *key* as a string."""
        try:
            return self._resolve(key)
        except (KeyError, TypeError, AttributeError):
            # Raw value was not a "${...}" string — return it unchanged.
            return self.config[key]

    def getValueAsInt(self, key: str) -> int:
        """Return the (placeholder-resolved) value for *key* as an int;
        0 when the placeholder has no ENV part."""
        try:
            value = self._resolve(key)
            return int(value) if value is not None else 0
        except (KeyError, TypeError, AttributeError):
            return int(self.config[key])

    def getValueAsBool(self, key: str, env: str = None) -> bool:
        """Return the (placeholder-resolved) value for *key* as a bool;
        False when the placeholder has no ENV part.

        The unused *env* parameter is kept for interface compatibility.
        """
        try:
            value = self._resolve(key)
            return self._to_bool(value) if value is not None else False
        except (KeyError, TypeError, AttributeError):
            return self._to_bool(self.config[key])
+    
def readYaml(path: str = 'application.yml', profile: str = None) -> YamlConfig:
    """Load *path* and, when *profile* is given, merge the overrides from
    ``<name>-<profile>.<ext>`` on top of it.

    Fixes over the original:
      * missing *path* no longer raises ``UnboundLocalError`` (``conf``
        was only bound inside the exists-branch) — an empty config is
        returned instead;
      * files are opened with an explicit UTF-8 encoding;
      * the profiled filename is derived with ``rpartition`` so dots in
        directory names don't break the split;
      * an empty YAML document (``yaml.load`` returning None) is treated
        as an empty mapping.

    :param path: path to the base YAML file
    :param profile: optional profile suffix (e.g. 'dev' ->
        'application-dev.yml')
    :return: the merged configuration wrapped in a YamlConfig
    """
    conf = {}
    if os.path.exists(path):
        with open(path, encoding='utf-8') as fd:
            conf = yaml.load(fd, Loader=yaml.FullLoader) or {}

    if profile is not None:
        # application.yml -> application-<profile>.yml
        base, _, ext = path.rpartition('.')
        profiledYaml = f'{base}-{profile}.{ext}' if base else f'{path}-{profile}'
        if os.path.exists(profiledYaml):
            with open(profiledYaml, encoding='utf-8') as fd:
                # Profile values override base values key-by-key.
                conf.update(yaml.load(fd, Loader=yaml.FullLoader) or {})

    return YamlConfig(conf)
+
+# res = readYaml()
+# mysqlConf = res.get('mysql')
+# print(mysqlConf)
+
+# print(res.getValueAsString("host"))
+# mysqlYaml = mysqlConf.getValueAsString("host")
+# print(mysqlYaml)
+# host = mysqlYaml.get("host").split(':')[-1][:-1]
+# port = mysqlYaml.get("port").split(':')[-1][:-1]
+# username = mysqlYaml.get("username").split(':')[-1][:-1]
+# password = mysqlYaml.get("password").split(':')[-1][:-1]
+# mysql_db = mysqlYaml.get("db").split(':')[-1][:-1]
+# print(host,port,username,password)

+ 6 - 0
kawan_spider/application.yml

@@ -0,0 +1,6 @@
# MySQL connection settings.  Each value is a "${ENV_VAR:default}"
# placeholder resolved by YamlLoader: the environment variable wins,
# otherwise the default after the colon is used.
mysql:
  host: ${MYSQL_HOST:100.64.0.25}
  # NOTE(review): env var name "MYSQL_PROT" looks like a typo for
  # MYSQL_PORT — confirm how deployments set it before renaming.
  port: ${MYSQL_PROT:3306}
  username: ${MYSQL_USERNAME:crawler}
  password: ${MYSQL_PASSWORD:Pass2022}
  db: ${MYSQL_DATABASE:crawler}

+ 87 - 0
kawan_spider/get_kw_sign.py

@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.8.10
+# Date   : 2025/4/15 10:29
+import hashlib
+import json
+import random
+import time
+
+
def return_new_url(url_):
    """Return the last path segment of *url_*, ignoring any query string.

    e.g. "https://host/a/b/c?x=1" -> "c".
    """
    # Everything before the first '?' is the path part.
    path, _, _query = url_.partition("?")
    # The final '/'-separated component is the route identifier.
    return path.rsplit("/", 1)[-1]
+
+
+def generate_random_string():
+    # 使用 random.random() 生成随机数并转换为 36 进制字符串,截取最后 10 个字符
+    part1 = ''.join(random.choices('0123456789abcdefghijklmnopqrstuvwxyz', k=10))
+    part2 = ''.join(random.choices('0123456789abcdefghijklmnopqrstuvwxyz', k=10))
+    return part1 + part2
+
+
+#
def sort_asc(t: dict):
    """Return a new dict whose keys are in ascending order and whose
    values are all converted to str.

    Insertion order of the result equals the sorted key order, which is
    what the JSON serialization downstream relies on.

    :param t: dict
    :return: sorted_dict
    """
    return {key: str(t[key]) for key in sorted(t)}
+
+
def get_sign(url_: str):
    """Build the MD5 request signature for *url_*.

    The server expects an MD5 over the compact JSON of the key-sorted
    dict {nonce, salt, timestamp, url}.

    :param url_: full request URL; only its last path segment is signed
    :return: (nonce, timestamp_ms, md5_hex)
    """
    payload = {
        'timestamp': str(int(time.time() * 1000)),   # current time, milliseconds
        'nonce': generate_random_string(),           # per-request random token
        'salt': "1s2JTL0F9u8^=m6-dW]6yLU1T50ppr8m",  # fixed salt — do not change
        'url': return_new_url(url_),                 # route identifier
    }

    # Sort keys, stringify values, and serialize without whitespace so the
    # digest matches the app's implementation byte-for-byte.
    compact = json.dumps(sort_asc(payload), separators=(',', ':'))
    digest = hashlib.md5(compact.encode('utf-8')).hexdigest()
    return payload['nonce'], payload['timestamp'], digest
+
+
# Manual smoke test: sign a sample URL and print the resulting
# (nonce, timestamp, md5) tuple.
if __name__ == "__main__":
    url = "https://app.cardplayd.com/app/system/shop/queryShopList"

    sign = get_sign(url)
    print('加密后的 sign 为:', sign)

    #     2b3865f7fc38e94ed0b9f673c3a6a104
    # "x0hs3t1hwyvsd5foi7dn" "6531a8d9fb78f5d3969b853cf31fbcd9"

+ 50 - 0
kawan_spider/kawan_login.py

@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.8.10
+# Date   : 2025/4/18 18:14
+import requests
+
+
# Default request headers captured from the mobile app (Dart HTTP client).
# NOTE(review): timestamp / signature / nonce below are values recorded
# from one session — presumably they must be regenerated per request
# (as the signing endpoints elsewhere do); confirm before relying on them.
headers = {
    "User-Agent": "Dart/3.6 (dart:io)",
    "Accept-Encoding": "gzip",
    "authorization": "Bearer null",
    "content-type": "application/json",
    "timestamp": "1744971243352",
    "signature": "854fcb756f17db85efa9958158e6c161",
    "app-version": "1.0.12",
    "content-language": "zh_CN",
    "nonce": "da899d80-1c3d-11f0-bcea-87a35c752929",
    "isencrypt": "false"
}
def get_sms_code(phonenumber: str = "19521500850"):
    """Request an SMS login code for *phonenumber*.

    Generalized: the phone number, previously hard-coded, is now a
    parameter whose default preserves the old behaviour.  The response is
    also returned so callers can inspect it instead of relying on the
    prints alone.

    :param phonenumber: phone number to receive the login code
    :return: the requests.Response object
    """
    url = "https://app.cardplayd.com/app/resource/sms/appLoginCode"
    params = {
        "phonenumber": phonenumber
    }
    response = requests.get(url, headers=headers, params=params)

    print(response.text)
    print(response)
    return response
+
def api_login():
    """Replay a captured app-login request against the auth endpoint.

    NOTE(review): the headers (timestamp / signature / nonce /
    encrypt-key) and the request body are values recorded from a real
    session — presumably they must be regenerated per request for the
    login to succeed; confirm before use.
    """
    headers = {
        "User-Agent": "Dart/3.6 (dart:io)",
        "Accept-Encoding": "gzip",
        "Content-Type": "application/json",
        "authorization": "Bearer null",
        "timestamp": "1744971271189",
        "content-language": "zh_CN",
        "encrypt-key": "qYkyi9s1Okl8NdPjsXceRnoNFpIvKrOspuzT33cD00wor9wb7kVeVkagz4NyGo2WX9CA1ypbfPfDwbHy5U3N6Q==",
        "app-version": "1.0.12",
        "signature": "f55c3e5919a3a92f65d51ff4779889cd",
        "isencrypt": "true",
        "nonce": "eb213450-1c3d-11f0-bcea-87a35c752929"
    }
    url = "https://app.cardplayd.com/app/auth/app/login"
    # The body is an encrypted blob captured from the app;
    # .encode('unicode_escape') turns the literal into bytes for POSTing.
    data = 'C8mLzEQaHL4vP3bMRLXeWUbRUzSkQSic7k+gO3wlE37XnWOC4tdLJ0JyeO24/B9SD4/FaxOlhdzv4wYJ5yynzFG4i9mlM6idDQ4wEIK572RSsYFxNs3foXgUDYbsgnMAZnqyzpruHRrkpRRykBMW9kxBOxJXMPRpgvIBQL/ZzA28dcW1uf1erijmNGC8+Isd'.encode(
        'unicode_escape')
    response = requests.post(url, headers=headers, data=data)

    print(response.text)
    print(response)

+ 486 - 0
kawan_spider/kw_spider.py

@@ -0,0 +1,486 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.8.10
+# Date   : 2025/4/14 11:22
+import time
+
+import schedule
+
+from mysql_pool import MySQLConnectionPool
+from settings import *
+from get_kw_sign import get_sign
+
# Base endpoint for all kawan API requests.
# Bug fix: the original line ended with a stray trailing comma, which made
# baseUrl a 1-tuple ("https://app.cardplayd.com/",) instead of a string.
baseUrl = "https://app.cardplayd.com/"
+
+
+def parse_shop_list(log, sql_pool, data_info_list, sql_shop_list):
+    data_list = []
+    for data_info in data_info_list:
+        if data_info is None:
+            continue
+
+        shop_id = data_info.get('shopId')
+        if shop_id in sql_shop_list:
+            log.debug(f'{inspect.currentframe().f_code.co_name} Shop {shop_id} already exists in the database.')
+            continue
+        data_dict = {
+            "shop_id": shop_id,
+            "shop_name": data_info.get('shopName'),
+            "fans_num": data_info.get('fansCount'),
+            "group_num": data_info.get('collageSuccessCount')
+        }
+        data_list.append(data_dict)
+        sql_shop_list.append(shop_id)
+    # print(data_list)
+    if data_list:
+        sql_pool.insert_many(table='kawan_shop_record', data_list=data_list)
+    else:
+        log.debug(f'{inspect.currentframe().f_code.co_name} No new shop data found............')
+
+
def get_shop_one_page(log, headers, page_num=1):
    """Fetch one page (10 entries) of the shop directory.

    Refreshes the signing headers (timestamp / nonce / signature) before
    the call and returns whatever make_request yields (parsed response,
    or None on failure).
    """
    log.debug(f'{inspect.currentframe().f_code.co_name} Request page_num: {page_num}')
    url = "https://app.cardplayd.com/app/system/shop/queryShopList"

    nonce, timestamp, signature = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=signature)

    query = {
        "pageNum": str(page_num),
        "pageSize": "10",
    }
    return make_request(log, 'GET', url, params=query, headers=headers)
+
+
def get_shop_list(log, sql_pool, sql_shop_list, headers):
    """Page through the full shop directory and persist new shops.

    :param log: logger object
    :param sql_pool: MySQL connection pool wrapper
    :param sql_shop_list: shop ids already stored (mutated by the parser)
    :param headers: request headers; signing fields are refreshed per page
    """
    page_num = 1
    len_all_shops = 0  # running count of shops seen across all pages

    while True:
        log.debug(f'{inspect.currentframe().f_code.co_name} Requesting.............')
        response_json = get_shop_one_page(log, headers, page_num)

        if response_json is None:
            log.error("Failed to fetch shop list. Exiting...")
            break

        data = response_json.get('data', {})
        data_info_list = data.get('dataInfo', [])
        # all_shops.extend(data_info_list)
        len_all_shops += len(data_info_list)

        # Parse the shop list page and insert unseen shops
        parse_shop_list(log, sql_pool, data_info_list, sql_shop_list)

        total_shops = data.get('total', 0)

        # A short page (fewer than the page size of 10) means the last page.
        if not data_info_list or len(data_info_list) < 10:
            log.info("No more shops found. Stopping requests.")
            break

        page_num += 1

        # Stop once we have fetched at least as many shops as the server
        # reports in total.
        if len_all_shops >= total_shops:
            log.info("Total shops fetched. Stopping requests.")
            break

    log.info(f"Total shops fetched: {len_all_shops}")
+
+
+# ----------------------------------------------------------------------------------------------------------------------
def get_acticity_xplain(log, product_id, headers):
    """Fetch the activity-explanation block for one product.

    (The function name keeps the original spelling — callers use it as-is.)

    :param product_id: collage/product id appended to the URL path
    :return: (explain_name, explain_info), or (None, None) on any failure
    """
    url = f"https://app.cardplayd.com/app/system/cardCollage/queryCollageActivityExplainList/{product_id}"
    # url_p = "https://app.cardplayd.com/app/system/cardCollage/queryCollageActivityExplainList"
    nonce, a, sign = get_sign(url)
    headers.update(timestamp=a, nonce=nonce, signature=sign)
    # response = requests.get(url, headers=headers)
    response = make_request(log, 'GET', url, headers=headers)
    # print(response)
    try:
        # 'data' is a list; only its first entry is used.
        json_data = response.get('data', [{}])
        if json_data:
            json_data_list = json_data[0]
            explain_name = json_data_list.get('explainName')
            explain_info = json_data_list.get('explainInfo')
            # print(explain_name, explain_info)
            return explain_name, explain_info
        else:
            log.warning(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}, Error: No data')
            return None, None
    except Exception as e:
        # Also reached when make_request returned None (AttributeError on .get).
        log.error(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}, Error: {e}')
        return None, None
+
+
def parse_sold_list(log, sql_pool, data_info_list, sql_product_id_list, headers, shop_name):
    """Map one page of sold-collage entries to DB rows and bulk-insert them.

    Each NEW product triggers one extra API call (get_acticity_xplain) to
    fetch its activity explanation.  *sql_product_id_list* is mutated in
    place to dedup across pages and shops.

    :param data_info_list: 'dataInfo' entries from one collageList page
    :param sql_product_id_list: product ids already persisted
    :param shop_name: denormalized into every inserted row
    """
    data_list = []
    for data_info in data_info_list:
        product_id = data_info.get('id')
        if product_id in sql_product_id_list:
            log.debug(f'{inspect.currentframe().f_code.co_name} Product {product_id} already exists in the database.')
            continue
        no = data_info.get('collageCode')
        title = data_info.get('collageName')
        img = data_info.get('rotationImagePath')

        price_sale = data_info.get('firstBuyPrice')
        original_price = data_info.get('unitPrice')  # original (unit) price
        total_price = data_info.get('totalPrice')
        sale_num = data_info.get('lotCount')

        play_way = data_info.get('playWay')
        spec_config = data_info.get('specifications')
        spec_config_count = data_info.get('count')
        sheets_number = data_info.get('sheetNumber')

        state = data_info.get('collageStatus')
        shop_id = data_info.get('shopId')
        category_id = data_info.get('cardCategoryId')
        on_sale_time = data_info.get('startTime')  # on-sale time
        end_time = data_info.get('applyTime')  # expected completion time
        finish_time = data_info.get('endTime')  # actual completion time
        begin_live_time = data_info.get('beginLiveTime')  # live-stream start time
        live_complete_time = data_info.get('liveCompleteTime')  # live-stream completion time

        explain_name, explain_info = get_acticity_xplain(log, product_id, headers)  # activity explanation name / info

        video_url = data_info.get('liveBackPath')
        sold_data = {
            "product_id": product_id,
            "no": no,
            "title": title,
            "img": img,
            "price_sale": price_sale,
            "original_price": original_price,
            "total_price": total_price,
            "sale_num": sale_num,
            "play_way": play_way,
            "spec_config": spec_config,
            "spec_config_count": spec_config_count,
            "sheets_number": sheets_number,
            "state": state,
            "shop_id": shop_id,
            "shop_name": shop_name,
            "category_id": category_id,
            "on_sale_time": on_sale_time,
            "end_time": end_time,
            "finish_time": finish_time,
            "begin_live_time": begin_live_time,
            "live_complete_time": live_complete_time,
            "explain_name": explain_name,
            "explain_info": explain_info,
            "video_url": video_url
        }
        data_list.append(sold_data)
        sql_product_id_list.append(product_id)
    # print(data_list)
    if data_list:
        sql_pool.insert_many(table="kawan_product_record", data_list=data_list)
+
+
def get_sold_one_page(log, shopId, headers, page_num=1):
    """Fetch one page (10 entries) of a shop's collage list filtered by
    status "6" (presumably the sold/finished state — confirm).

    Refreshes the signing headers before the call; returns the parsed
    response from make_request, or None on failure.
    """
    log.debug(f'{inspect.currentframe().f_code.co_name} Request page_num: {page_num}')
    url = "https://app.cardplayd.com/app/system/shopInfo/collageList"

    query = {
        "pageNum": str(page_num),
        "pageSize": "10",
        "status": "6",
        "timeLimit": "true",
        "shopId": shopId,
    }

    nonce, timestamp, signature = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=signature)
    return make_request(log, 'GET', url, params=query, headers=headers)
+
+
def get_sold_list(log, sql_pool, shopId, shop_name, sql_product_id_list, headers):
    """Page through one shop's sold-collage list and persist new products.

    :param shopId: shop whose listings are fetched
    :param shop_name: denormalized into each product row
    :param sql_product_id_list: product ids already stored (mutated by the
        parser to dedup across shops)
    """
    page_num = 1
    len_all_sold = 0  # running count of listings seen across pages
    log.debug(f'{inspect.currentframe().f_code.co_name} Requesting with shopId: {shopId}.............')

    while True:
        response_json = get_sold_one_page(log, shopId, headers, page_num)
        if response_json is None:
            log.error("Failed to fetch sold list. Exiting...")
            break

        data = response_json.get('data', {})

        # NOTE(review): 'total' is read from the top-level response here,
        # while sibling endpoints read it from 'data' — confirm this API
        # really reports the total outside 'data'.
        total_solds = response_json.get('total', 0)
        if total_solds == 0:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: total_solds == 0, shop_id:{shopId}没有已售数据")
            break

        sold_info_list = data.get('dataInfo', [])
        # print(sold_info_list)
        if not sold_info_list:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: sold_info_list为空, shop_id:{shopId}没有已售数据")
            break

        len_all_sold += len(sold_info_list)

        # Parse the sold-list page and insert unseen products
        parse_sold_list(log, sql_pool, sold_info_list, sql_product_id_list, headers, shop_name)

        # A short page (fewer than the page size of 10) means the last page.
        if not sold_info_list or len(sold_info_list) < 10:
            log.info("No more sold_info_list found. Stopping requests.")
            break

        page_num += 1

        # Stop once at least 'total' listings have been fetched.
        if len_all_sold >= total_solds:
            log.info("Total sold_info_list fetched. Stopping requests.")
            break

    log.info(f"Total sold_info_list fetched: {len_all_sold}")
+
+
+# ----------------------------------------------------------------------------------------------------------------------
def get_product_detail(log, product_id, headers):
    """Fetch the full detail for one collage product.

    Currently unused; kept as a backup endpoint.

    Consistency fix: routed through make_request like every other
    endpoint in this module (the original called requests.get directly,
    which relied on an import this module does not visibly make, and only
    printed the result).  The parsed response (or None) is now returned
    to the caller; the diagnostic print is kept.

    :param product_id: collage id passed as the 'collageId' query param
    :return: parsed response from make_request, or None on failure
    """
    log.debug(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}')
    url = "https://app.cardplayd.com/app/system/cardCollage/getCardCollageInfoById"
    params = {
        # "collageId": "1911391864602927105"
        "collageId": product_id
    }
    nonce, a, sign = get_sign(url)
    headers.update(timestamp=a, nonce=nonce, signature=sign)
    response = make_request(log, 'GET', url, params=params, headers=headers)

    print(response)
    return response
+
+
+# ----------------------------------------------------------------------------------------------------------------------
def get_player_list(log, cardCollageId):
    """Experimental: fetch the player/publicity list for one collage.

    NOTE(review): `data` is a dict passed via requests' `data=` kwarg, so
    it is form-encoded even though the headers declare application/json —
    presumably `json=data` is what the server expects; confirm.  The
    bearer token below is hard-coded from a captured session.
    """
    log.debug(f'{inspect.currentframe().f_code.co_name} Request cardCollageId: {cardCollageId}')
    url = "https://app.cardplayd.com/app/system/cardReport/getCarmiPublicityVoByTypeList"
    # data = {
    #     "F9gls72L1UmU0/fdUzTFS3ry8fQCShi/nU0HmHsW3WtEtgxZ9j3kG2SQKu3iQ3FDtBlp4bnHXXxDZqeXjyEiEICm1Xo4QJTYKIB9kijJy3mA2V2Ayt2X5Rqf+eipjEX+5ES+7D3gZdEmcdT9gPOjjn69z4hqnweX3thbvg5/LXJ1531bkv/otiMYFshgbbMQ51el/Tlh20zDkpj952Y8Gg": "="
    # }
    # data = "DuDAqydlmgNlK/1CtwT2hIVdQuBreX0MQC0hjznCSJyH3ZIKgyk7yEK1+Fs3E3eFoR9kKSrQnDVis5jh0SwKmDlQ9cEdQMPa1facZd5asXV10oQrud4aONB4/RjyQh/iNL7tWTIT2HZCtwJIz61kA/6kRqIIpnBfoOkFTegD6TvzG1XhmYlMcZ70PWIpF4o+VMYhAVyTFfsLa7kBFJUdqA=="

    player_headers = {
        "User-Agent": "Dart/3.6 (dart:io)",
        "Accept-Encoding": "gzip",
        "Content-Type": "application/json",
        "authorization": "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpblR5cGUiOiJhcHBMb2dpbiIsImxvZ2luSWQiOiJhcHBfdXNlcjoxOTExNjI3MDU4NDk2ODM5NjgyIiwicm5TdHIiOiJoUTNtS2VwQ210RWZ4VFkzQVNIUTcxV1RRZlo1ajBkNCIsImNsaWVudGlkIjoiNDI4YTgzMTBjZDQ0Mjc1N2FlNjk5ZGY1ZDg5NGYwNTEiLCJ1c2VySWQiOjE5MTE2MjcwNTg0OTY4Mzk2ODIsImludml0ZUNvZGUiOiIyMDc3MTYiLCJwaG9uZSI6IjEzMDE0NjE3NjE0In0.IuWoS8kCmG4OQFh1XINJOHpbeKMZKlMmticVglAVF_Y",
        "content-language": "zh_CN",
        # "encrypt-key": "n8XhkgeVYg26D8/nY3MGThxNzKI59EMd69AjUF3Jk5ZT9ixwo21PABLWhwLMJFuSXqASVsUaq2KhUnjsaaIXDA==",
        "app-version": "1.0.12",
        # "isencrypt": "true",
        # "isencrypt": "false",
    }
    # data = "916L0IKDzAb1hrnjyzFDCH+prEuxPR0LqfU5m79fYlfZTCvFQhehf43vS0P9Gz91+ySAFH8cvuIaC8f2A6Awo3HXmjJY4GzUXPTDNNehEgMugpVAXsS1ly9tWuWgQp0nnZuFZzWL281CNuo9cY8XkrcyL9p2QqVs5GDNnSFNi2Y8LRPk+1aiED2n+rvY7j0stupez5m9+1AcNGAUyKO/hQ=="
    # data = "eyJjYXJkQ29sbGFnZUlkIjogIjE5MTEzOTE4NjQ2MDI5MjcxMDUiLCJwYWdlU2l6ZSI6ICIxMCIsInBhZ2VOdW0iOiAiMSIsImVuY3J5cHQta2V5IjogIm44WGhrZ2VWWWcyNkQ4L25ZM01HVGh4TnpLSTU5RU1kNjlBalVGM0prNVpUOWl4d28yMVBBQkxXaHdMTUpGdVNYcUFTVnNVYXEyS2hVbmpzYWFJWERBPT0ifQ=="
    # data = {"type": "1", "cardCollageId": "1906194540964519937", "userAnonymous": None, "pageSize": 100, "pageNum": 1,
    #         "isAsc": "", "orderByColumn": "create_time", "filterInfo": ""}

    data = {"type": "1", "cardCollageId": cardCollageId, "userAnonymous": None, "pageSize": 100, "pageNum": 1,
            "isAsc": "", "orderByColumn": "create_time", "filterInfo": ""}

    nonce, a, sign = get_sign(url)
    player_headers.update(timestamp=a, nonce=nonce, signature=sign)
    response = requests.post(url, headers=player_headers, data=data)
    # response = make_request(log, 'POST', url, headers=headers, data=data)

    print(response.text)
    print(response)
+
+
+# ----------------------------------------------------------------------------------------------------------------------
def get_report_one_page(log, collageId, headers, page_num=1):
    """Fetch one page (10 entries) of open-card report info for a collage.

    Refreshes the signing headers before the call; returns the parsed
    response from make_request, or None on failure.
    """
    url = "https://app.cardplayd.com/app/system/cardReport/getOpenReportInfo"

    query = {
        "cardCollageId": collageId,
        "pageSize": "10",
        "pageNum": str(page_num),
    }

    nonce, timestamp, signature = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=signature)
    return make_request(log, 'GET', url, headers=headers, params=query)
+
+
+def parse_report_list(sql_pool, report_info_list, collageId):
+    data_list = []
+    for report_info in report_info_list:
+        data = {
+            "product_id": collageId,
+            "card_name": report_info.get("carmiInfo"),
+            "open_card_time": report_info.get("openCardTime"),
+            "imgs": report_info.get("frontImagePath")
+        }
+        data_list.append(data)
+    if data_list:
+        sql_pool.insert_many(table="kawan_report_record", data_list=data_list)
+
+
def get_report_list(log, sql_pool, collageId, headers):
    """Page through one collage product's open-card reports, persist them,
    then mark the product as processed (report_state = 1).

    NOTE(review): the final report_state=1 update runs however the loop
    exits — including the "no data" and request-failure paths; confirm
    that is intended.
    """
    page_num = 1
    len_all_report = 0
    while True:
        response_json = get_report_one_page(log, collageId, headers, page_num)
        if response_json is None:
            log.error("Failed to fetch report list. Exiting...")
            break

        data = response_json.get('data', {})

        total_reports = data.get('total', 0)
        if total_reports == 0:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: {response_json['msg']}, collageId:{collageId}没有 report 数据")
            break

        report_info_list = data.get('otherCardReportResultList', [])
        if not report_info_list:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: {response_json['msg']}, collageId:{collageId}没有 report 数据")
            break
        len_all_report += len(report_info_list)

        parse_report_list(sql_pool, report_info_list, collageId)

        # A short page (fewer than the page size of 10) means the last page.
        if not report_info_list or len(report_info_list) < 10:
            log.info("No more report_info_list found. Stopping requests.")
            break

        page_num += 1
        if len_all_report >= total_reports:
            log.info("Total report_info_list fetched. Stopping requests.")
            break

    log.info(f"Total report_info_list fetched: {len_all_report}")
    # Flag this product so the next run skips its reports.
    sql_pool.update_one("update kawan_product_record set report_state = 1 where product_id = %s", (collageId,))
+
+
@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def kawan_main(log):
    """
    Main crawl task: shops -> sold products -> card-opening reports.

    Decorated with @retry: on an uncaught exception the whole run is
    retried up to 100 times with a 3600 s wait between attempts.

    :param log: logger object
    """
    log.info(
        f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')

    # Set up the MySQL connection pool; abort (and let @retry reschedule)
    # when it is unhealthy.
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("数据库连接池异常")
        raise RuntimeError("数据库连接池异常")

    try:
        # The API bearer token is maintained in the kawan_token table.
        sql_token = sql_pool.select_one("SELECT token FROM kawan_token")
        # sql_token_str = 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpblR5cGUiOiJhcHBMb2dpbiIsImxvZ2luSWQiOiJhcHBfdXNlcjoxOTExNjI0MDUzMzQ5MzI2ODQ5Iiwicm5TdHIiOiJvZURBT2QwTEFvYmlmTFR2Y0xVVXpNQ0haaWVLWXRrUyIsImNsaWVudGlkIjoiNDI4YTgzMTBjZDQ0Mjc1N2FlNjk5ZGY1ZDg5NGYwNTEiLCJ1c2VySWQiOjE5MTE2MjQwNTMzNDkzMjY4NDksImludml0ZUNvZGUiOiI0NjI2MTgiLCJwaG9uZSI6IjE5NTIxNTAwODUwIn0.PY7l7OvS2fOHsgl-YsHcEy1TyKsIgkmSxSV4RZxWaxc'
        headers = {
            "User-Agent": "Dart/3.6 (dart:io)",
            "Accept-Encoding": "gzip",
            # "authorization": "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpblR5cGUiOiJhcHBMb2dpbiIsImxvZ2luSWQiOiJhcHBfdXNlcjoxOTExNjI3MDU4NDk2ODM5NjgyIiwicm5TdHIiOiJoUTNtS2VwQ210RWZ4VFkzQVNIUTcxV1RRZlo1ajBkNCIsImNsaWVudGlkIjoiNDI4YTgzMTBjZDQ0Mjc1N2FlNjk5ZGY1ZDg5NGYwNTEiLCJ1c2VySWQiOjE5MTE2MjcwNTg0OTY4Mzk2ODIsImludml0ZUNvZGUiOiIyMDc3MTYiLCJwaG9uZSI6IjEzMDE0NjE3NjE0In0.IuWoS8kCmG4OQFh1XINJOHpbeKMZKlMmticVglAVF_Y",
            "authorization": sql_token[0],
            # "authorization": sql_token_str,
            "content-type": "application/json",
            "app-version": "1.0.12",
            "content-language": "zh_CN",
            # "nonce": "ceac8160-18e3-11f0-bb6e-95de6e5ff903",
            "isencrypt": "false"
        }
        # Stage 1: fetch the shop directory and insert new shops.
        try:
            sql_shop_list = sql_pool.select_all("SELECT shop_id FROM kawan_shop_record")
            sql_shop_list = [item[0] for item in sql_shop_list]

            get_shop_list(log, sql_pool, sql_shop_list, headers)
            sql_shop_list.clear()
        except Exception as e:
            log.error(f"Error fetching last_product_id: {e}")
        time.sleep(5)

        # Stage 2: fetch each shop's sold products.
        try:
            sql_shop_id_list = sql_pool.select_all("SELECT shop_id, shop_name FROM kawan_shop_record")
            # sql_shop_id_list = [item[0] for item in sql_shop_id_list]

            # Known product ids, used for dedup across shops.
            sql_product_id_list = sql_pool.select_all("SELECT product_id FROM kawan_product_record")
            sql_product_id_list = [item[0] for item in sql_product_id_list]

            for shop_id_name in sql_shop_id_list:
                shop_id = shop_id_name[0]
                shop_name = shop_id_name[1]
                log.info(f"开始获取商家:{shop_id} 已售商品")
                try:
                    get_sold_list(log, sql_pool, shop_id, shop_name, sql_product_id_list, headers)
                except Exception as e:
                    log.error(f"Error fetching get_sold_list for shop_id:{shop_id}, {e}")

            sql_product_id_list.clear()
        except Exception as e:
            log.error(f"Error fetching sql_shop_id_list: {e}")
        time.sleep(5)

        # Stage 3: fetch card-opening reports for unprocessed products
        # (report_state 0 = pending, 1 = done, 2 = errored).
        try:
            sql_product_id_list_for_report = sql_pool.select_all(
                "SELECT product_id FROM kawan_product_record WHERE report_state = 0")
            sql_product_id_list_for_report = [item[0] for item in sql_product_id_list_for_report]
            for product_id in sql_product_id_list_for_report:
                log.info(f"开始获取商品:{product_id} 拆卡报告")
                try:
                    get_report_list(log, sql_pool, product_id, headers)
                except Exception as e:
                    log.error(f"Error fetching reports for product_id:{product_id}, {e}")
                    sql_pool.update_one("update kawan_product_record set report_state = 2 where product_id = %s",
                                        (product_id,))

        except Exception as e:
            log.error(f"Error fetching reports: {e}")
        # time.sleep(5)

        # Stage 4 (disabled): fetch the players for each product.
        # try:
        #     get_player_list(log, sql_pool)
        # except Exception as e:
        #     log.error(f"Error fetching players: {e}")

    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
+
+
def schedule_task():
    """
    Entry point for the spider module: schedule kawan_main to run daily
    at 00:01 and block forever servicing the scheduler.
    """
    # Run the task once immediately (disabled)
    # kawan_main(log=logger)

    # Register the daily job
    schedule.every().day.at("00:01").do(kawan_main, log=logger)

    while True:
        schedule.run_pending()
        time.sleep(1)  # poll the scheduler once per second
+
+
if __name__ == '__main__':
    # Manual one-off runs for debugging individual stages:
    # get_shop_list(logger, None)
    # get_sold_list(logger)
    # get_acticity_xplain(logger, '1910557299676192770')
    # get_product_detail(logger, '1910557299676192770')
    # get_player_list(logger)
    # get_report_list(logger, '1910557299676192770')
    # kawan_main(logger)
    schedule_task()

+ 461 - 0
kawan_spider/mysql_pool.py

@@ -0,0 +1,461 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date   : 2025/3/25 14:14
+import re
+import pymysql
+import YamlLoader
+from loguru import logger
+from dbutils.pooled_db import PooledDB
+
# Load MySQL connection settings from application.yml via YamlLoader.readYaml().
# Values support the ${ENV:default} placeholder syntax handled by YamlConfig.
yaml = YamlLoader.readYaml()
mysqlYaml = yaml.get("mysql")
sql_host = mysqlYaml.getValueAsString("host")          # DB host
sql_port = mysqlYaml.getValueAsInt("port")             # DB port
sql_user = mysqlYaml.getValueAsString("username")      # DB user
sql_password = mysqlYaml.getValueAsString("password")  # DB password
sql_db = mysqlYaml.getValueAsString("db")              # schema / database name
+
+
class MySQLConnectionPool:
    """
    MySQL connection pool built on dbutils' ``PooledDB``.

    Connection parameters come from the module-level ``application.yml``
    values (``sql_host`` etc.).  Single-statement helpers log failures and
    re-raise; batch helpers degrade to row-by-row execution when a batch
    fails.
    """

    def __init__(self, mincached=4, maxcached=5, maxconnections=10, log=None):
        """
        Initialise the pool.

        :param mincached: connections opened eagerly at start-up (0 = none)
        :param maxcached: max idle connections kept in the pool (0/None = unbounded)
        :param maxconnections: hard cap on simultaneous connections (0/None = unbounded)
        :param log: custom logger; defaults to the module-level loguru logger
        """
        self.log = log or logger
        self.pool = PooledDB(
            creator=pymysql,
            mincached=mincached,
            maxcached=maxcached,
            maxconnections=maxconnections,
            blocking=True,  # wait for a free connection instead of raising when exhausted
            host=sql_host,
            port=sql_port,
            user=sql_user,
            password=sql_password,
            database=sql_db,
            ping=0  # 0 = never ping-check connections before use
        )

    def _execute(self, query, args=None, commit=False):
        """
        Run one statement on a pooled connection.

        :param query: SQL text
        :param args: parameters for the placeholders
        :param commit: commit after a successful execute
        :return: the cursor (already closed by the context manager; pymysql
                 still exposes rowcount/lastrowid and the buffered rows on it,
                 which is what the select_*/insert_* helpers rely on)
        :raises: the underlying exception, after logging it
        """
        conn = None  # lets the handler know whether a connection was ever acquired
        try:
            with self.pool.connection() as conn:
                with conn.cursor() as cursor:
                    cursor.execute(query, args)
                    if commit:
                        conn.commit()
                    self.log.debug(f"sql _execute, Query: {query}, Rows: {cursor.rowcount}")
                    return cursor
        except Exception as e:
            # Fix: the original rolled back unconditionally, which raised
            # UnboundLocalError (masking the real error) whenever the pool
            # itself failed to hand out a connection.
            if commit and conn is not None:
                try:
                    conn.rollback()
                except Exception:
                    pass  # connection already returned to the pool; best effort only
            self.log.error(f"Error executing query: {e}, Query: {query}, Args: {args}")
            raise e

    def select_one(self, query, args=None):
        """
        Run a query and return a single row.

        :param query: SELECT statement
        :param args: query parameters
        :return: first result row, or None
        """
        cursor = self._execute(query, args)
        return cursor.fetchone()

    def select_all(self, query, args=None):
        """
        Run a query and return all rows.

        :param query: SELECT statement
        :param args: query parameters
        :return: tuple of result rows
        """
        cursor = self._execute(query, args)
        return cursor.fetchall()

    def insert_one(self, query, args):
        """
        Run a single INSERT statement.

        :param query: INSERT statement
        :param args: statement parameters
        :return: last inserted row id
        """
        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        cursor = self._execute(query, args, commit=True)
        return cursor.lastrowid  # id generated by AUTO_INCREMENT, if any

    def insert_all(self, query, args_list):
        """
        Batch INSERT; on failure rolls back and retries each row individually.

        :param query: INSERT statement
        :param args_list: list of parameter tuples
        """
        conn = None
        cursor = None
        try:
            conn = self.pool.connection()
            cursor = conn.cursor()
            cursor.executemany(query, args_list)
            conn.commit()
            self.log.debug(f"sql insert_all, SQL: {query}, Rows: {len(args_list)}")
            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_all 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        except Exception as e:
            # Fix: guard the rollback — conn is still None if pool.connection() failed.
            if conn is not None:
                conn.rollback()
            # Fixed log text: there is no 5-attempt retry here (the original
            # message was copied from a tenacity-wrapped helper).
            self.log.error(f"Batch insertion failed. Trying single inserts. Error: {e}")
            # Degrade to row-by-row inserts so one bad row doesn't drop the batch.
            rowcount = 0
            for args in args_list:
                self.insert_one(query, args)
                rowcount += 1
            self.log.debug(f"Batch insertion failed. Inserted {rowcount} rows individually.")
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()

    def insert_one_or_dict(self, table=None, data=None, query=None, args=None, commit=True):
        """
        Single-row insert, either from a dict or from raw SQL.

        :param table: table name (required in dict mode)
        :param data: {column: value} dict (mutually exclusive with query)
        :param query: raw SQL statement (mutually exclusive with data)
        :param args: parameters for raw SQL mode
        :param commit: auto-commit
        :return: last inserted row id
        :raises ValueError: when neither data nor query is supplied
        """
        if data is not None:
            if not isinstance(data, dict):
                raise ValueError("Data must be a dictionary")

            # Identifiers are validated (not escaped) — see _safe_identifier.
            keys = ', '.join([self._safe_identifier(k) for k in data.keys()])
            values = ', '.join(['%s'] * len(data))
            query = f"INSERT INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
            args = tuple(data.values())
        elif query is None:
            raise ValueError("Either data or query must be provided")

        cursor = self._execute(query, args, commit)
        self.log.info(f"sql insert_one_or_dict, Table: {table}, Rows: {cursor.rowcount}")
        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one_or_dict 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        return cursor.lastrowid

    def insert_many(self, table=None, data_list=None, query=None, args_list=None, batch_size=500, commit=True):
        """
        Batch insert (dict-list mode or raw-SQL mode), chunked by batch_size.

        :param table: table name (required in dict mode)
        :param data_list: list of {column: value} dicts
        :param query: raw SQL (alternative to data_list)
        :param args_list: parameter list for raw SQL mode
        :param batch_size: rows per executemany chunk
        :param commit: auto-commit each chunk
        :return: number of affected rows
        :raises ValueError: when neither data_list nor query is supplied
        """
        if data_list is not None:
            if not data_list or not isinstance(data_list[0], dict):
                raise ValueError("Data_list must be a non-empty list of dictionaries")

            # The first dict defines the column order for every row.
            keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
            values = ', '.join(['%s'] * len(data_list[0]))
            query = f"INSERT INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
            args_list = [tuple(d.values()) for d in data_list]
        elif query is None:
            raise ValueError("Either data_list or query must be provided")

        total = 0
        for i in range(0, len(args_list), batch_size):
            batch = args_list[i:i + batch_size]
            conn = None  # fix: avoid UnboundLocalError in the handler below
            try:
                with self.pool.connection() as conn:
                    with conn.cursor() as cursor:
                        cursor.executemany(query, batch)
                        if commit:
                            conn.commit()
                        total += cursor.rowcount
            except Exception as e:
                if commit and conn is not None:
                    try:
                        conn.rollback()
                    except Exception:
                        pass
                self.log.error(f"Batch insert failed: {e}")
                # Degrade to single-row inserts for this chunk only.
                for args in batch:
                    try:
                        self.insert_one_or_dict(table=None, query=query, args=args, commit=commit)
                        total += 1
                    except Exception as e2:
                        self.log.error(f"Single insert failed: {e2}")
                        continue
        self.log.info(f"sql insert_many_or_dict, Table: {table}, Total Rows: {total}")
        return total

    def insert_too_many(self, query, args_list, batch_size=1000):
        """
        Chunked batch insert for very large loads (100k+ rows); each failing
        chunk is degraded to single-row inserts.

        :param query: INSERT statement
        :param args_list: list of parameter tuples
        :param batch_size: rows per chunk
        """
        for i in range(0, len(args_list), batch_size):
            batch = args_list[i:i + batch_size]
            try:
                with self.pool.connection() as conn:
                    with conn.cursor() as cursor:
                        cursor.executemany(query, batch)
                        conn.commit()
            except Exception as e:
                self.log.error(f"insert_too_many error. Trying single insert. Error: {e}")
                # Only the failing chunk falls back to per-row inserts.
                for args in batch:
                    self.insert_one(query, args)

    def update_one(self, query, args):
        """
        Run a single UPDATE statement.

        :param query: UPDATE statement
        :param args: statement parameters
        :return: the executed cursor
        """
        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_one 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        return self._execute(query, args, commit=True)

    def update_all(self, query, args_list):
        """
        Batch UPDATE; on failure rolls back and retries each row individually.

        :param query: UPDATE statement
        :param args_list: list of parameter tuples
        """
        conn = None
        cursor = None
        try:
            conn = self.pool.connection()
            cursor = conn.cursor()
            cursor.executemany(query, args_list)
            conn.commit()
            self.log.debug(f"sql update_all, SQL: {query}, Rows: {len(args_list)}")
            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_all 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        except Exception as e:
            # Fix: guard the rollback — conn is still None if pool.connection() failed.
            if conn is not None:
                conn.rollback()
            self.log.error(f"Error executing query: {e}")
            # Degrade to row-by-row updates.
            rowcount = 0
            for args in args_list:
                self.update_one(query, args)
                rowcount += 1
            self.log.debug(f'Batch update failed. Updated {rowcount} rows individually.')
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()

    def update_one_or_dict(self, table=None, data=None, condition=None, query=None, args=None, commit=True):
        """
        Single-row update, either from a dict or from raw SQL.

        :param table: table name (required in dict mode)
        :param data: {column: value} dict (mutually exclusive with query)
        :param condition: update condition, one of:
            - dict: {"id": 1} -> "WHERE id = %s" (parameterised)
            - str: "id = 1" -> used verbatim (caller must ensure safety)
            - tuple: ("id = %s", [1]) -> parameterised clause
        :param query: raw SQL statement (mutually exclusive with data)
        :param args: parameters for raw SQL mode
        :param commit: auto-commit
        :return: affected row count
        :raises ValueError: on invalid argument combinations
        """
        if data is not None:
            if not isinstance(data, dict):
                raise ValueError("Data must be a dictionary")
            if table is None:
                raise ValueError("Table name is required for dictionary update")
            if condition is None:
                raise ValueError("Condition is required for dictionary update")

            # Build the SET clause from validated identifiers.
            set_clause = ", ".join([f"{self._safe_identifier(k)} = %s" for k in data.keys()])
            set_values = list(data.values())

            condition_clause, condition_args = self._parse_condition(condition)
            query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
            args = set_values + condition_args

        elif query is None:
            raise ValueError("Either data or query must be provided")

        cursor = self._execute(query, args, commit)
        self.log.debug(
            f"Updated table={table}, rows={cursor.rowcount}, query={query[:100]}...",
            extra={"table": table, "rows": cursor.rowcount}
        )
        return cursor.rowcount

    def _parse_condition(self, condition):
        """
        Normalise a condition into a (clause, args) pair.

        :param condition: dict / str / (clause, args) tuple
        :return: (str, list) SQL WHERE fragment and its parameters
        :raises ValueError: for unsupported condition shapes
        """
        if isinstance(condition, dict):
            clause = " AND ".join([f"{self._safe_identifier(k)} = %s" for k in condition.keys()])
            args = list(condition.values())
        elif isinstance(condition, str):
            clause = condition  # NOTE: used verbatim — caller must ensure safety
            args = []
        elif isinstance(condition, (tuple, list)) and len(condition) == 2:
            clause, args = condition[0], condition[1]
            if not isinstance(args, (list, tuple)):
                args = [args]
        else:
            raise ValueError("Condition must be dict/str/(clause, args)")
        return clause, args

    def update_many(self, table=None, data_list=None, condition_list=None, query=None, args_list=None, batch_size=500,
                    commit=True):
        """
        Batch update (dict-list mode or raw-SQL mode), chunked by batch_size.

        :param table: table name (required in dict mode)
        :param data_list: list of {column: value} dicts
        :param condition_list: list of condition dicts, same length as data_list
        :param query: raw SQL (alternative to data_list)
        :param args_list: parameter list for raw SQL mode
        :param batch_size: rows per executemany chunk
        :param commit: auto-commit each chunk
        :return: affected row count
        :raises ValueError: on invalid argument combinations
        """
        if data_list is not None:
            if not data_list or not isinstance(data_list[0], dict):
                raise ValueError("Data_list must be a non-empty list of dictionaries")
            if condition_list is None or len(data_list) != len(condition_list):
                raise ValueError("Condition_list must be provided and match the length of data_list")
            if not all(isinstance(cond, dict) for cond in condition_list):
                raise ValueError("All elements in condition_list must be dictionaries")

            # The first row's key sets define the shared statement shape.
            first_data_keys = set(data_list[0].keys())
            first_cond_keys = set(condition_list[0].keys())

            set_clause = ', '.join([self._safe_identifier(k) + ' = %s' for k in data_list[0].keys()])
            condition_clause = ' AND '.join([self._safe_identifier(k) + ' = %s' for k in condition_list[0].keys()])
            base_query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
            total = 0

            for i in range(0, len(data_list), batch_size):
                batch_data = data_list[i:i + batch_size]
                batch_conds = condition_list[i:i + batch_size]
                batch_args = []

                # executemany is only valid when every row in the chunk has the
                # same columns and condition keys as the first row.
                can_batch = True
                for data, cond in zip(batch_data, batch_conds):
                    if set(data.keys()) != first_data_keys or set(cond.keys()) != first_cond_keys:
                        can_batch = False
                        break
                    batch_args.append(tuple(data.values()) + tuple(cond.values()))

                if not can_batch:
                    # Heterogeneous rows: fall back to per-row updates.
                    for data, cond in zip(batch_data, batch_conds):
                        self.update_one_or_dict(table=table, data=data, condition=cond, commit=commit)
                        total += 1
                    continue

                conn = None  # fix: avoid UnboundLocalError in the handler below
                try:
                    with self.pool.connection() as conn:
                        with conn.cursor() as cursor:
                            cursor.executemany(base_query, batch_args)
                            if commit:
                                conn.commit()
                            total += cursor.rowcount
                            self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
                except Exception as e:
                    if commit and conn is not None:
                        try:
                            conn.rollback()
                        except Exception:
                            pass
                    self.log.error(f"Batch update failed: {e}")
                    # Degrade to single-row updates for this chunk only.
                    for args, data, cond in zip(batch_args, batch_data, batch_conds):
                        try:
                            self._execute(base_query, args, commit=commit)
                            total += 1
                        except Exception as e2:
                            self.log.error(f"Single update failed: {e2}, Data: {data}, Condition: {cond}")
            self.log.info(f"Total updated rows: {total}")
            return total
        elif query is not None:
            if args_list is None:
                raise ValueError("args_list must be provided when using query")

            total = 0
            for i in range(0, len(args_list), batch_size):
                batch_args = args_list[i:i + batch_size]
                conn = None  # fix: avoid UnboundLocalError in the handler below
                try:
                    with self.pool.connection() as conn:
                        with conn.cursor() as cursor:
                            cursor.executemany(query, batch_args)
                            if commit:
                                conn.commit()
                            total += cursor.rowcount
                            self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
                except Exception as e:
                    if commit and conn is not None:
                        try:
                            conn.rollback()
                        except Exception:
                            pass
                    self.log.error(f"Batch update failed: {e}")
                    # Degrade to single-row updates for this chunk only.
                    for args in batch_args:
                        try:
                            self._execute(query, args, commit=commit)
                            total += 1
                        except Exception as e2:
                            self.log.error(f"Single update failed: {e2}, Args: {args}")
            self.log.info(f"Total updated rows: {total}")
            return total
        else:
            raise ValueError("Either data_list or query must be provided")

    def check_pool_health(self):
        """
        Verify the pool can hand out a working connection.

        Usage::

            sql_pool = MySQLConnectionPool(log=log)
            if not sql_pool.check_pool_health():
                raise RuntimeError("database connection pool is unhealthy")

        :return: True if a connection pings successfully, else False
        """
        try:
            with self.pool.connection() as conn:
                conn.ping(reconnect=True)
                return True
        except Exception as e:
            self.log.error(f"Connection pool health check failed: {e}")
            return False

    @staticmethod
    def _safe_identifier(name):
        """Validate a SQL identifier (table/column name) to block injection."""
        if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):
            raise ValueError(f"Invalid SQL identifier: {name}")
        return name

+ 110 - 0
kawan_spider/settings.py

@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.8.10
+# Date   : 2025/4/14 11:58
+import inspect
+import requests
+from loguru import logger
+from tenacity import retry, stop_after_attempt, wait_fixed
+
# Replace loguru's default stderr sink with a daily-rotating file sink.
logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 day")  # NOTE(review): loguru documents "7 days" — confirm "7 day" parses

# Default headers for the upstream API; it expects a Dart/Flutter client UA.
HEADERS = {
    "User-Agent": "Dart/3.6 (dart:io)"
}
+
+
def after_log(retry_state):
    """
    Tenacity ``after`` callback: log the outcome of every retry attempt.

    :param retry_state: tenacity RetryCallState for the wrapped call
    """
    # The wrapped functions take their logger as the first positional
    # argument; fall back to the module-level logger when absent.
    log = retry_state.args[0] if retry_state.args else logger

    fn_name = retry_state.fn.__name__
    attempt = retry_state.attempt_number
    if retry_state.outcome.failed:
        log.warning(f"Function '{fn_name}', Attempt {attempt} Times")
    else:
        log.info(f"Function '{fn_name}', Attempt {attempt} succeeded")
+
+
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the requests-style proxy mapping for the KDL tunnel proxy.

    :param log: logger (unused on the success path; kept for interface
                compatibility and for the retry ``after_log`` callback)
    :return: dict with "http" and "https" proxy URLs
    """
    # SECURITY(review): credentials are hard-coded in source; move them to
    # application.yml / environment variables instead of committing them.
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    # The original wrapped this in try/except, but plain dict construction
    # cannot raise — the handler was dead code, so it is removed.
    proxy_url = "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
    return {"http": proxy_url, "https": proxy_url}
+
+
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def make_request(log, method, url, params=None, data=None, headers=None, proxies=None, timeout=22, token=None):
    """
    Generic HTTP request helper with retry.

    :param log: logger object (falls back to the module-level logger)
    :param method: request method, 'GET' or 'POST'
    :param url: request URL
    :param params: query parameters for GET
    :param data: JSON body for POST
    :param headers: request headers; defaults to HEADERS plus the auth token
    :param proxies: proxy mapping; defaults to get_proxys()
    :param timeout: request timeout in seconds
    :param token: authorization token; a default token is used when omitted
    :return: parsed JSON dict on business code 200, otherwise {}
    :raises: requests/JSON errors are re-raised so tenacity can retry
    """
    if not log:
        log = logger
    if headers is None:
        # Bug fix: copy HEADERS instead of mutating the shared module-level
        # dict — the original wrote the auth token into HEADERS itself,
        # leaking it into every subsequent call in the process.
        headers = dict(HEADERS)
        if not token:
            token = "a-2df1084f01cb48f2ab67806867fea37f"
        headers["authorization"] = token

    if proxies is None:
        proxies = get_proxys(log)

    try:
        with requests.Session() as session:
            if method.upper() == 'GET':
                response = session.get(url, headers=headers, params=params, proxies=proxies, timeout=timeout)
            elif method.upper() == 'POST':
                response = session.post(url, headers=headers, json=data, proxies=proxies, timeout=timeout)
            else:
                log.error(f"Unsupported request method: {method}")
                return {}

            response.raise_for_status()
            # Use a distinct name: the original rebound the `data` parameter here.
            payload = response.json()
            if payload["code"] == 200:
                log.info(f"Successfully fetched {method} request to {url}")
                return payload
            else:
                log.warning(f"Warning {inspect.currentframe().f_code.co_name}: {payload['msg']}")
                return {}
    except requests.exceptions.RequestException as e:
        log.error(f"Error making {method} request to {url}: {e}")
        raise e
    except ValueError as e:
        log.error(f"Error parsing JSON for {method} request to {url}: {e}")
        raise e
    except Exception as e:
        log.error(f"Error making {method} request to {url}: {e}")
        raise e