@@ -0,0 +1,324 @@
+# -*- coding: utf-8 -*-
+# Author : Charley
+# Python : 3.10.8
+# Date : 2025/3/19 18:55
+import time
+import inspect
+import requests
+import schedule
+import user_agent
+from loguru import logger
+from datetime import datetime
+from mysq_pool import MySQLConnectionPool
+from tenacity import retry, stop_after_attempt, wait_fixed
+
|
|
|
+"""
|
|
|
+com.jihuanshe
|
|
|
+"""
|
|
|
+
|
|
|
+logger.remove()
|
|
|
+logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
|
|
|
+ format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
|
|
|
+ level="DEBUG", retention="7 day")
|
|
|
+
|
|
|
+
|
|
|
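+# task_state values written to jhs_task (inferred from the updates below):
+#   1 = product saved, 2 = product removed by the site (API code 440),
+#   3 = request/parse failed, 4 = auction still ongoing or pending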
+def after_log(retry_state):
+    """
+    tenacity retry callback
+    :param retry_state: RetryCallState object
+    """
+    # Check whether positional args exist and are non-empty
+    if retry_state.args and len(retry_state.args) > 0:
+        log = retry_state.args[0]  # use the logger passed as the first argument
+    else:
+        log = logger  # fall back to the global logger
+
+    if retry_state.outcome.failed:
+        log.warning(
+            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
+    else:
+        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
+
+
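+# Builds a requests-style proxies dict for what looks like a Kuaidaili (kdl)
+# tunnel proxy; the tunnel endpoint and credentials are hard-coded below.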
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_proxys(log):
+    """
+    Get proxy settings
+    :return: proxies dict
+    """
+    tunnel = "x371.kdltps.com:15818"
+    kdl_username = "t13753103189895"
+    kdl_password = "o0yefv6z"
+    try:
+        proxies = {
+            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
+            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
+        }
+        return proxies
+    except Exception as e:
+        log.error(f"Error getting proxy: {e}")
+        raise
+
+
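+# The three save_* helpers below each write to their own table through the local
+# MySQLConnectionPool wrapper; insert_one/insert_all are assumed to execute
+# parameterized INSERT statements.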
+def save_product_data(sql_pool, product_info: tuple):
+    """
+    Save product data
+    :param sql_pool: MySQL connection pool object
+    :param product_info: data to save
+    """
+    sql = """
+    INSERT INTO jhs_product_record (seller_user_id, seller_username, product_id, app_id, nonce_str, signature, auction_product_name, auction_product_images, game_key, language_text, authenticator_name, grading, starting_price, max_bid_price, status, auction_product_start_time, auction_product_end_time)
+    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
+    sql_pool.insert_one(sql, product_info)
+
+
+def save_biddings_data(sql_pool, biddings_list: list):
+    """
+    Save biddings data
+    :param sql_pool: MySQL connection pool object
+    :param biddings_list: data to save -> list
+    """
+    sql = """
+    INSERT INTO jhs_biddings_record (product_id, username, bid_price, bid_status, created_at)
+    VALUES (%s, %s, %s, %s, %s)"""
+    sql_pool.insert_all(sql, biddings_list)
+
+
+def save_shop_data(sql_pool, shop_info: tuple):
+    """
+    Save shop data
+    :param sql_pool: MySQL connection pool object
+    :param shop_info: data to save
+    """
+    sql = """
+    INSERT INTO jhs_shop_record (seller_user_id, seller_username, follower_count, success_order_user_count, seller_credit_rank_image_url)
+    VALUES (%s, %s, %s, %s, %s)"""
+    sql_pool.insert_one(sql, shop_info)
+
+
+def parse_data(log, resp_json, sql_product_id, sql_pool, sql_shop_id_list):
+    """
+    Parse the response and store the results in three tables: product,
+    biddings and shop.
+    :param log: logger object
+    :param resp_json: response JSON
+    :param sql_product_id: (id, product_id) row from jhs_task
+    :param sql_pool: MySQL connection pool object
+    :param sql_shop_id_list: list of seller_user_id values already stored
+    """
+    sql_id = sql_product_id[0]
+    pid = sql_product_id[1]
+
+    status = resp_json.get('status')
+    if status not in ['ongoing', 'pending']:
+        """
+        Get shop info
+        """
+        seller_user_id = resp_json.get('seller_user_id')
+        seller_username = resp_json.get('seller_username')
+
+        # print(seller_user_id)
+        # # Check whether the seller id is already in the database
+        # sql_exists_flag = """SELECT EXISTS (SELECT 1 FROM jhs_shop_record WHERE seller_user_id = %s) AS exists_flag"""
+        # exists_flag = sql_pool.select_one(sql_exists_flag, (seller_user_id,))
+        # exists_flag = exists_flag[0]
+        # if exists_flag == 1:
+
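+        # sql_shop_id_list is an in-memory cache of shops already saved, used to
+        # avoid re-inserting rows (duplicate keys are also caught below as a fallback).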
+        if str(seller_user_id) in sql_shop_id_list:
+            log.info(
+                f"----------------- The seller_user_id {seller_user_id} is already in the database, no need to save -----------------")
+        else:
+            follower_count = resp_json.get('follower_count')
+            success_order_user_count = resp_json.get('success_order_user_count')
+            seller_credit_rank_image_url = resp_json.get('seller_credit_rank_image_url')
+            shop_info = (
+                seller_user_id, seller_username, follower_count, success_order_user_count, seller_credit_rank_image_url
+            )
+            # print(shop_info)
+
+            try:
+                save_shop_data(sql_pool, shop_info)
+                # keep the cache consistent with the str() membership check above
+                sql_shop_id_list.append(str(seller_user_id))
+            except Exception as e:
+                if "Duplicate entry" in str(e):
+                    log.warning(f"Duplicate seller_user_id {seller_user_id}, skipping insert....")
+                else:
+                    log.error(f"Error saving seller_user_id data: {str(e)}")
+
+        # sql_pool.update_one("update jhs_task set task_state = 1 where id = %s", (sql_id,))
+
+        """
+        Get product info
+        """
+        app_id = resp_json.get('appId')
+        nonce_str = resp_json.get('nonceStr')
+        signature = resp_json.get('signature')
+
+        # auction_product_id = resp_json.get('auction_product_id')
+        auction_product_name = resp_json.get('auction_product_name')
+        auction_product_images_list = resp_json.get('auction_product_images', [])
+        if auction_product_images_list:
+            auction_product_images = ','.join(auction_product_images_list)
+        else:
+            auction_product_images = None
+
+        game_key = resp_json.get('game_key')
+        language_text = resp_json.get('language_text')
+        authenticator_name = resp_json.get('authenticator_name')
+        grading = resp_json.get('grading')
+        starting_price = resp_json.get('starting_price')
+        max_bid_price = resp_json.get('max_bid_price')
+
+        auction_product_start_timestamp = resp_json.get('auction_product_start_timestamp')
+        auction_product_start_time = datetime.fromtimestamp(auction_product_start_timestamp).strftime(
+            '%Y-%m-%d %H:%M:%S') if auction_product_start_timestamp else None
+        auction_product_end_timestamp = resp_json.get('auction_product_end_timestamp')
+        auction_product_end_time = datetime.fromtimestamp(auction_product_end_timestamp).strftime(
+            '%Y-%m-%d %H:%M:%S') if auction_product_end_timestamp else None
+
+        product_info = (
+            seller_user_id, seller_username, pid, app_id, nonce_str, signature, auction_product_name,
+            auction_product_images, game_key, language_text, authenticator_name, grading, starting_price, max_bid_price,
+            status, auction_product_start_time, auction_product_end_time
+        )
+        # print(product_info)
+        try:
+            save_product_data(sql_pool, product_info)
+            sql_pool.update_one("update jhs_task set task_state = 1 where id = %s", (sql_id,))
+        except Exception as e:
+            if "Duplicate entry" in str(e):
+                log.warning(f"Duplicate pid {pid}, skipping insert....")
+            else:
+                log.error(f"Error saving product data: {str(e)}")
+            return
+
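+        # Only one row per bidder is written to jhs_biddings_record: the block
+        # below keeps each user's highest bid and drops their earlier bids.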
+        """
+        Get biddings info
+        """
+        biddings = resp_json.get('biddings', [])
+        # print(biddings)
+
+        # Dict holding each user's highest bid record
+        highest_bids = {}
+
+        for record in biddings:
+            username = record['username']
+            bid_price = float(record['bid_price'])  # convert the bid to float for comparison
+
+            # Update the record if the user is not in the dict yet, or if this bid is higher than the stored one
+            if username not in highest_bids or bid_price > float(highest_bids[username]['bid_price']):
+                highest_bids[username] = record
+
+        bids_list = list(highest_bids.values())
+        biddings_list = [
+            (pid, record['username'], record['bid_price'], record['bid_status'], record['created_at'])
+            for record in bids_list
+        ]
+        # print(biddings_list)
+        if biddings_list:
+            save_biddings_data(sql_pool, biddings_list)
+        else:
+            log.info(f"................ No biddings found for product {pid}, no need to save ................")
+
+    else:
+        log.info(f"................ The product {pid} is ongoing or pending, no need to parse ................")
+        sql_pool.update_one("update jhs_task set task_state = 4 where id = %s", (sql_id,))
+
+
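+# Each get_resp attempt (up to 5, one second apart) pulls a fresh tunnel proxy
+# and generates a new user-agent before calling the share/auction-product API.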
+@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
+def get_resp(log, sql_product_id, sql_pool, sql_shop_id_list):
+    """
+    Fetch the response for one auction product
+    :param log: logger object
+    :param sql_product_id: (id, product_id) row from jhs_task
+    :param sql_pool: MySQL connection pool object
+    :param sql_shop_id_list: list of seller_user_id values already stored
+    """
+    sql_id = sql_product_id[0]
+    pid = sql_product_id[1]
+    headers = {
+        "accept": "application/json, text/plain, */*",
+        "referer": "https://www.jihuanshe.com/",
+        "user-agent": user_agent.generate_user_agent()
+    }
+    url = "https://api.jihuanshe.com/api/market/share/auction-product"
+    params = {
+        "auction_product_id": pid,
+        "url": f"https://www.jihuanshe.com/app/auction?auctionProductId={pid}"
+    }
+    response = requests.get(url, headers=headers, params=params, timeout=5, proxies=get_proxys(log))
+    # print(response.json())
+    # print(response)
+    resp_json = response.json()
+    if resp_json:
+        if resp_json.get("code") == 440:
+            log.debug(f"< Sorry, this auction product has been removed, pid:{pid} >")
+            sql_pool.update_one("update jhs_task set task_state = 2 where id = %s", (sql_id,))
+        else:
+            try:
+                parse_data(log, resp_json, sql_product_id, sql_pool, sql_shop_id_list)
+            except Exception as e:
+                log.error(f"get_resp call parse_data() sql_product_id:{sql_product_id} failed, error:{e}")
+
+
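+# jhs_main is retried up to 100 times, one hour apart, but only when it raises
+# (e.g. when the MySQL connection pool cannot be created); normal runs swallow
+# per-task errors and simply finish.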
+@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
+def jhs_main(log):
+    """
+    Main function
+    :param log: logger object
+    """
+    log.info(
+        f'Starting the {inspect.currentframe().f_code.co_name} crawler task....................................................')
+
+    # Configure the MySQL connection pool
+    sql_pool = MySQLConnectionPool(log=log)
+    if not sql_pool:
+        log.error("MySQL database connection failed")
+        raise Exception("MySQL database connection failed")
+
+    try:
+        # max_sql_id = sql_pool.select_one("select max(product_id) from jhs_product_record")
+        # if max_sql_id:
+        #     max_sql_id = max_sql_id[0]
+        # else:
+        #     max_sql_id = 422342
+        # log.debug(f"Largest product_id currently in the database, max_sql_id:{max_sql_id}")
+
+        # Fetch the product_ids that still need crawling (currently from a hard-coded
+        # id cutoff; the max_sql_id variant is kept commented out above)
+        product_id_list = sql_pool.select_all(
+            "SELECT id, product_id FROM jhs_task WHERE task_state != 1 AND id > 420107 LIMIT 10000")
+        # "SELECT id, product_id FROM jhs_task WHERE task_state IN (0, 2) AND id > 420107 LIMIT 10000")
+        # "SELECT id, product_id FROM jhs_task WHERE task_state IN (0, 2) AND id > %s LIMIT 6000", (max_sql_id,))
+        # "SELECT id, product_id FROM jhs_task WHERE task_state IN (0, 2) AND id < 376575 ORDER BY id DESC")
+        product_id_list = list(product_id_list)
+
+        sql_shop_id_list = sql_pool.select_all("SELECT seller_user_id FROM jhs_shop_record")
+        # keep the ids as strings so the membership check in parse_data works
+        sql_shop_id_list = [str(keyword[0]) for keyword in sql_shop_id_list]
+        for sql_product_id in product_id_list:
+            try:
+                get_resp(log, sql_product_id, sql_pool, sql_shop_id_list)
+            except Exception as e:
+                log.error(f"Loop sql_product_id:{sql_product_id} failed to fetch, error:{e}")
+                sql_pool.update_one("update jhs_task set task_state = 3 where id = %s", (sql_product_id[0],))
+    except Exception as e:
+        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
+    finally:
+        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished, waiting for the next collection round............')
+
+
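+# Run the crawl once at startup, then repeat it daily at 01:31 via `schedule`.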
+def schedule_task():
+    """
+    Set up the scheduled task
+    """
+    jhs_main(log=logger)
+
+    schedule.every().day.at("01:31").do(jhs_main, log=logger)
+    while True:
+        schedule.run_pending()
+        time.sleep(1)
+
+
+if __name__ == '__main__':
+    schedule_task()
+    # get_resp(logger, (438807, 438807), MySQLConnectionPool(log=logger),[])