# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date   : 2025/2/27 11:57
import inspect

import requests
import user_agent
from loguru import logger
from mysq_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed

"""
Keyword ranges:
D + 9 digits, from D000000001 to D000200000
E + 9 digits, from E000000001 to E000040000
C + 9 digits, from C000000001 to C000600000
"""

logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")


def after_log(retry_state):
    """
    tenacity retry callback.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the first positional argument if present,
    # otherwise fall back to the global logger.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxy configuration (tunnel proxy).
    :param log: logger object
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise


def save_data(sql_pool, info):
    """
    Save one record.
    :param sql_pool: MySQL connection pool object
    :param info: tuple of values to insert
    """
    sql = """
        INSERT INTO baocui_record (keyword, picurl, dt_pics, vr_pics, vr_date, title, name, year, score,
                                   card_type, card_num, card_score, card_centering, card_edges,
                                   card_corners, card_surface)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    sql_pool.insert_one(sql, info)


def parse_data(resp_json: dict, keyword, sql_pool):
    """
    Parse the API response and save it.
    :param resp_json: return value of response.json() -> dict
    :param keyword: the queried keyword
    :param sql_pool: MySQL connection pool object
    """
    info_dict = resp_json.get("info")
    if info_dict:
        picurl = info_dict.get("picurl")    # front image
        dt_pics = info_dict.get("dt_pics")  # back image
        vr_pics = info_dict.get("vr_pics")  # VR image
        vr_date = info_dict.get("vr_date")
        title = info_dict.get("details", {}).get("title")
        details_grade = info_dict.get("details", {}).get("grade", [])
        # Collect the grade fields; the comparisons below match the Chinese field
        # titles returned by the API, so those literals must stay untranslated.
        info_dict = {
            'name': None, 'year': None, 'score': None, 'card_type': None, 'card_num': None,
            'card_score': None, 'card_centering': None, 'card_edges': None, 'card_corners': None,
            'card_surface': None
        }
        for grade in details_grade:
            grade_name = grade.get("title")
            grade_value = grade.get("val")
            # print(f"{grade_name}:{grade_value}")
            if grade_name == "名称":        # name
                info_dict['name'] = grade_value
            elif grade_name == "年份":      # year
                info_dict['year'] = grade_value
            elif grade_name == "分数":      # score
                info_dict['score'] = grade_value
            elif grade_name == "卡种":      # card type
                info_dict['card_type'] = grade_value
            elif grade_name == "编号":      # card number
                info_dict['card_num'] = grade_value
            elif grade_name == "分值":      # card score
                info_dict['card_score'] = grade_value
            elif grade_name == "CENTERING":
                info_dict['card_centering'] = grade_value
            elif grade_name == "EDGES":
                info_dict['card_edges'] = grade_value
            elif grade_name == "CORNERS":
                info_dict['card_corners'] = grade_value
            elif grade_name == "SURFACE":
                info_dict['card_surface'] = grade_value
        info = (keyword, picurl, dt_pics, vr_pics, vr_date, title,
                info_dict.get('name'), info_dict.get('year'), info_dict.get('score'),
                info_dict.get('card_type'), info_dict.get('card_num'), info_dict.get('card_score'),
                info_dict.get('card_centering'), info_dict.get('card_edges'),
                info_dict.get('card_corners'), info_dict.get('card_surface'))
        # print(info)
        save_data(sql_pool, info)


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_bc_resp(log, keyword, sql_pool):
    """
    Fetch and handle the search response for one keyword.
    :param log: logger object
    :param keyword: the queried keyword
    :param sql_pool: MySQL connection pool object
    """
    headers = {
        "accept": "application/json",
        "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
        # "referer": "https://www.baocuicoin.com/",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://web-api.baocuicoin.com/Search/index"
    params = {
        "keyword": keyword,
        # "code": "404"
        "code": ""
    }
    try:
        with requests.Session() as session:
            response = session.post(url, headers=headers, params=params, proxies=get_proxys(log), timeout=10)
            # print(response.text)
            response.raise_for_status()
            resp_json = response.json()
            if not resp_json:
                log.error(f"Empty response JSON, fetching {keyword} failed")
                return
            if resp_json.get("code") == '200':
                parse_data(resp_json, keyword, sql_pool)
            else:
                log.error(f"code != 200, fetching {keyword} failed, msg:{resp_json.get('msg')}")
    except Exception as e:
        log.warning(f"{inspect.currentframe().f_code.co_name} error: {e}")


def generate_keywords():
    """
    Generate the keywords to query.
    """
    # for prefix, end in [('C', 600001), ('D', 200001), ('E', 40001)]:
    for prefix, end in [('D', 200001), ('E', 40001)]:
        # Resume the 'D' range from 150503; every other prefix starts at 1.
        start = 150503 if prefix == 'D' else 1
        for i in range(start, end):
            yield f"{prefix}{i:09d}"


@retry(stop=stop_after_attempt(50), wait=wait_fixed(1800), after=after_log)
def baocui_main(log):
    """
    Main entry point.
    :param log: logger object
    """
    log.info(
        f'Starting crawler task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("Failed to connect to the MySQL database")
        raise Exception("Failed to connect to the MySQL database")
    try:
        # keyword = 'E000011631'
        for keyword in generate_keywords():
            try:
                get_bc_resp(log, keyword, sql_pool)
            except Exception as e:
                log.error(f"Loop keyword:{keyword} failed, error:{e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')


if __name__ == '__main__':
    baocui_main(logger)
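
# NOTE: `mysq_pool.MySQLConnectionPool` is a local module that is not included
# here. The commented-out sketch below is only an assumption of the minimal
# interface this script relies on (a constructor accepting `log` and an
# `insert_one(sql, params)` method); it is not the author's actual
# implementation, and the `pymysql` + `DBUtils` choice and connection
# parameters are illustrative placeholders.
#
# from dbutils.pooled_db import PooledDB
# import pymysql
#
# class MySQLConnectionPool:
#     def __init__(self, log, **db_kwargs):
#         self.log = log
#         # db_kwargs would carry host/user/password/database/charset, etc.
#         self.pool = PooledDB(creator=pymysql, maxconnections=5, **db_kwargs)
#
#     def insert_one(self, sql, params):
#         # Borrow a connection from the pool, run one parameterized INSERT,
#         # and commit; roll back and log on failure.
#         conn = self.pool.connection()
#         try:
#             with conn.cursor() as cursor:
#                 cursor.execute(sql, params)
#             conn.commit()
#         except Exception as e:
#             conn.rollback()
#             self.log.error(f"insert_one failed: {e}")
#         finally:
#             conn.close()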