# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/5/28 15:35
import inspect
import requests
import user_agent
from loguru import logger
import concurrent.futures
from tenacity import stop_after_attempt, wait_fixed, retry
from mysql_pool import MySQLConnectionPool
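
# NOTE: mysql_pool is a local helper module that is not shown here. From the
# call sites below, MySQLConnectionPool is assumed to expose roughly:
#   pool = MySQLConnectionPool(log=logger)  # acquire a pooled connection set
#   rows = pool.select_all(sql)             # SELECT -> list of row tuples
#   pool.update_one(sql, params)            # parameterized write
# This interface is inferred from usage, not the module's actual definition.
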
logger.remove()
logger.add("logs/add_{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="1 day")


def after_log(retry_state):
    """
    Retry callback invoked by tenacity after each attempt.
    :param retry_state: a tenacity RetryCallState object
    """
    # Prefer the logger passed as the wrapped function's first argument
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]  # logger handed to the wrapped function
    else:
        log = logger  # fall back to the module-level logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise
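
# Note: get_proxys builds the same dict on every call; the Kuaidaili tunnel
# endpoint is expected to rotate the exit IP server-side, so each request can
# reuse the one fixed host:port with the credentials embedded in the URL.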


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_data(log, bgs_id, sql_pool):
    """
    Look up one BGS serial number via the Beckett grading API and persist the result.
    :param log: logger instance
    :param bgs_id: BGS serial number (auth code) to look up
    :param sql_pool: MySQL connection pool
    """
    headers = {
        "accept": "application/json, text/plain, */*",
        "user-agent": user_agent.generate_user_agent()
    }
    url = "https://www.beckett.com/api/grading/lookup"
    params = {
        "category": "BGS",
        "serialNumber": str(bgs_id)
    }
    response = requests.get(url, headers=headers, params=params, proxies=get_proxys(log), timeout=5)
    if response.status_code == 404:
        # No Record Found: set the task state to 3
        log.warning(f"No Record Found for {bgs_id}")
        sql_pool.update_one("UPDATE bgs_task SET state=3 WHERE auth_code=%s", (bgs_id,))
        return
    if response.status_code != 200:
        # Lookup failed: set the task state to 2
        log.warning(f"Error getting data for {bgs_id}, status {response.status_code}")
        sql_pool.update_one("UPDATE bgs_task SET state=2 WHERE auth_code=%s", (bgs_id,))
        return
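    # The endpoint is assumed to return a flat JSON object along these lines
    # (field names inferred from the .get() calls below; values illustrative):
    #   {"center_grade": "9.5", "corners_grade": "9", "edges_grade": "9.5",
    #    "surface_grade": "9", "pop_report": "9.5", "pop_higher": "123"}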
    result_dict = response.json()
    if result_dict:
        centering_grade = result_dict.get('center_grade')
        corner_grade = result_dict.get('corners_grade')
        edges_grade = result_dict.get('edges_grade')
        surfaces_grade = result_dict.get('surface_grade')
        total_grade = result_dict.get('pop_report')
        cards_grade = result_dict.get('pop_higher')
        info = (centering_grade, corner_grade, edges_grade, surfaces_grade, total_grade, cards_grade, int(bgs_id))
        # Skip the write if every grade field (bgs_id excluded) is None or empty
        all_none_or_empty = all(x is None or x == '' for x in info[:-1])
        if all_none_or_empty:
            log.debug("All values are empty")
        else:
            sql_pool.update_one(
                "UPDATE beckett_bgs_record SET centering_grade=%s, corner_grade=%s, edges_grade=%s, surfaces_grade=%s, total_grade=%s, cards_grade=%s WHERE number=%s",
                info)
        # Lookup succeeded: set the task state to 1
        sql_pool.update_one("UPDATE bgs_task SET state=1 WHERE auth_code=%s", (bgs_id,))
def process_urls(log, ids, mysql_pool, batch_size=1000, max_workers=5):
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        for i in range(0, len(ids), batch_size):
            batch = ids[i:i + batch_size]
            try:
                futures_to_urls = {executor.submit(get_data, log, url, mysql_pool): url for url in batch}
                for future in concurrent.futures.as_completed(futures_to_urls):
                    url = futures_to_urls[future]
                    try:
                        future.result()
                        log.debug(f"Processed {url} successfully")
                    except Exception as exc:
                        log.debug(f"Error processing {url}: {exc}")
            except Exception as e:
                log.error(f"Failed to submit tasks: {e}")


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def bgs_add_main(log):
    try:
        log.info(f'Starting crawler task {inspect.currentframe().f_code.co_name}.........................................')
        sql_pool = MySQLConnectionPool(log=log)
        if not sql_pool:
            log.error("Database connection failed")
            raise Exception("Database connection failed")
        # Backfill records left incomplete by an earlier API change
        sql = "SELECT number FROM beckett_bgs_record WHERE centering_grade IS NULL AND corner_grade IS NULL AND surfaces_grade IS NULL;"
        sql_bgs_id_list = sql_pool.select_all(sql)
        sql_bgs_id_list = [bid[0] for bid in sql_bgs_id_list]
        try:
            process_urls(log, sql_bgs_id_list, sql_pool, batch_size=1000,
                         max_workers=10)  # tune batch_size and max_workers as needed
        except Exception as e:
            log.error(f'process urls: {e}')
    except Exception as e:
        log.error(e)
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')
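

# Note: the retry decorator above (100 attempts, 1-hour wait) only re-runs
# bgs_add_main if an exception escapes it; since the function catches all
# exceptions internally, a failed run is logged rather than retried.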
if __name__ == '__main__':
    bgs_add_main(logger)