- # -*- coding: utf-8 -*-
- # Author : Charley
- # Python : 3.10.8
- # Date : 2025/11/11 11:54
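- # Crawler for the feishezhang "一番赏" (ichiban kuji) WeChat mini-program API:
- # fetches the prize-pool list, per-pool prize details, and per-box draw (order)
- # records, and stores them in MySQL via the local MySQLConnectionPool helper.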
- import time
- import random
- import inspect
- import requests
- from loguru import logger
- from datetime import datetime
- from mysql_pool import MySQLConnectionPool
- from tenacity import retry, stop_after_attempt, wait_fixed
- logger.remove()
- logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
- format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
- level="DEBUG", retention="7 day")
- headers = {
- "authority": "kurabu.feishezhang.com",
- "accept": "*/*",
- "accept-language": "zh-CN,zh;q=0.9",
- "content-type": "application/json",
- "referer": "https://servicewechat.com/wxa5880b2d8e8a0f37/17/page-frame.html",
- "sec-fetch-dest": "empty",
- "sec-fetch-mode": "cors",
- "sec-fetch-site": "cross-site",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) NetType/WIFI MiniProgramEnv/Windows WindowsWechat/WMPF WindowsWechat(0x63090c33) XWEB/9129",
- "xweb_xhr": "1"
- }
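- # These headers mimic the WeChat Windows mini-program client (note the MicroMessenger /
- # MiniProgramEnv user-agent and the servicewechat.com referer).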
- token = "26eed48c80bad053717687daef3a8668"
- crawl_category = "一番赏"
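- # NOTE: `token` looks like a captured mini-program session credential; it is assumed
- # to expire eventually and to need manual replacement when requests start failing.
- # `crawl_category` ("一番赏", ichiban kuji) is stored verbatim as a tag on every row.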
- def after_log(retry_state):
- """
- Retry callback invoked by tenacity after each attempt.
- :param retry_state: RetryCallState object
- """
- # Check whether positional args exist and are non-empty
- if retry_state.args and len(retry_state.args) > 0:
- log = retry_state.args[0] # use the logger passed to the wrapped function
- else:
- log = logger # fall back to the global logger
- if retry_state.outcome.failed:
- log.warning(
- f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
- else:
- log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
- @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
- def get_proxys(log):
- """
- 获取代理
- :return: 代理
- """
- tunnel = "x371.kdltps.com:15818"
- kdl_username = "t13753103189895"
- kdl_password = "o0yefv6z"
- try:
- proxies = {
- "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
- "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
- }
- return proxies
- except Exception as e:
- log.error(f"Error getting proxy: {e}")
- raise e
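- # NOTE: get_proxys() builds a requests-style proxies dict, but none of the request
- # calls below actually pass it. A minimal sketch of how it could be wired in
- # (assuming the tunnel credentials are still valid):
- # response = requests.post(url, headers=headers, params=params, json=data,
- # proxies=get_proxys(log), timeout=15)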
- @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
- def get_reward_single_page(log, page, sql_pool, sql_reward_list):
- """
- 获取 赏池 单个页面数据
- :param log: 日志对象
- :param page: 页码
- :param sql_pool: 数据库连接池对象
- :param sql_reward_list: sql_reward_list
- :return: len(data_list) -> 获取数据长度
- """
- log.debug(f"{inspect.currentframe().f_code.co_name} 开始获取第{page}页数据")
- url = "https://kurabu.feishezhang.com/api.php"
- params = {
- "s": "reward/index",
- "system_type": "default",
- "application": "app",
- "application_client_type": "weixin",
- "token": token,
- "uuid": "227de1d4-c429-4e73-b972-42c26560ed83",
- "ajax": "ajax"
- }
- data = {
- # "page": 1,
- "page": page,
- "type": 0
- }
- # data = json.dumps(data, separators=(',', ':'))
- response = requests.post(url, headers=headers, params=params, json=data)
- response.raise_for_status()
- resp_json = response.json()
- if resp_json['code'] == 0:
- data_list = resp_json.get('data', {}).get('data', [])
- info_list = []
- for cdata in data_list:
- reward_pool_id = cdata.get('id') # prize pool id
- title = cdata.get('title')
- simple_desc = cdata.get('simple_desc')
- show_image = cdata.get('show_image')
- total_sessions = cdata.get('total_sessions') # total number of boxes
- price = cdata.get('price_per_draw')
- status = cdata.get('status')
- reward_type = cdata.get('type')
- sales_count = cdata.get('sales_count')
- # Skip pools that already exist in the database; just refresh their volatile fields
- if reward_pool_id in sql_reward_list:
- log.debug(f"{inspect.currentframe().f_code.co_name} reward_pool_id: {reward_pool_id} already exists")
- sql_pool.update_one_or_dict(table="feishezhang_reward_list_record",
- data={"total_sessions": total_sessions, "price": price,
- "sales_count": sales_count},
- condition={"reward_pool_id": reward_pool_id})
- continue
- sale_time_int = cdata.get('sale_time') # sale start timestamp
- sale_time = datetime.fromtimestamp(sale_time_int).strftime('%Y-%m-%d %H:%M:%S') if sale_time_int else 0
- order_time_int = cdata.get('order_time') # order timestamp
- order_time = datetime.fromtimestamp(order_time_int).strftime(
- '%Y-%m-%d %H:%M:%S') if order_time_int else 0
- data_dict = {
- "reward_pool_id": reward_pool_id,
- "title": title,
- "simple_desc": simple_desc,
- "show_image": show_image,
- "total_sessions": total_sessions,
- "price": price,
- "status": status,
- "reward_type": reward_type,
- "sales_count": sales_count,
- "sale_time": sale_time,
- "order_time": order_time,
- "crawl_category": crawl_category
- }
- # print(data_dict)
- info_list.append(data_dict)
- # Save the batch
- if info_list:
- sql_pool.insert_many(table="feishezhang_reward_list_record", data_list=info_list, ignore=True)
- return len(data_list)
- else:
- log.warning(resp_json['msg'])
- return 0
- def get_reward_list(log, sql_pool, sql_reward_list):
- """
- 获取 赏池 列表
- :param log: 日志对象
- :param sql_pool: 数据库连接池对象
- :param sql_reward_list: sql_reward_list
- """
- log.debug(f"{inspect.currentframe().f_code.co_name} 开始获取列表数据")
- page = 1
- max_page = 1000
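- # Pagination: the list endpoint appears to return at most 15 items per page, so a
- # short page is treated as the last one (an assumption inferred from the check below);
- # max_page is only a safety cap against an endless loop.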
- while page <= max_page:
- len_data_list = get_reward_single_page(log, page, sql_pool, sql_reward_list)
- if len_data_list < 15:
- log.debug(f"当前页数据不足15条,已结束获取数据,当前页数:{page}")
- break
- page += 1
- time.sleep(random.uniform(1, 2))
- @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
- def get_reward_detail(log, reward_pool_id, sql_pool):
- """
- 获取 赏池 详情
- :param log: 日志对象
- :param reward_pool_id: 赏池id
- :param sql_pool: 数据库连接池对象
- """
- log.debug(f"{inspect.currentframe().f_code.co_name} 开始获取详情数据")
- url = "https://kurabu.feishezhang.com/api.php"
- params = {
- "s": "reward/GetRandomAvailableSession",
- "system_type": "default",
- "application": "app",
- "application_client_type": "weixin",
- "token": token,
- "uuid": "227de1d4-c429-4e73-b972-42c26560ed83",
- "ajax": "ajax"
- }
- data = {
- # "reward_pool_id": "3cd3fa34-b0b7-4b1b-a593-411191fc745f",
- "reward_pool_id": reward_pool_id,
- "session_id": ""
- }
- # data = json.dumps(data, separators=(',', ':'))
- response = requests.post(url, headers=headers, params=params, json=data)
- response.raise_for_status()
- resp_json = response.json()
- # print(resp_json)
- if resp_json['code'] == 0:
- session_data = resp_json.get('data', {}).get('session', {})
- session_id = session_data.get('id')
- prize_items = resp_json.get('data', {}).get('prize_items', [])
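- # NOTE: reward/GetRandomAvailableSession presumably returns one randomly chosen open
- # box (session), so the prize_items below describe that single session's inventory.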
- info_list = []
- for item in prize_items:
- prize_id = item.get('id')
- prize_name = item.get('name')
- level_nickname = item.get('level_nickname')
- parameter = item.get('parameter')
- prize_image = item.get('image')
- prize_num = item.get('num')
- goods_ids = item.get('goods_ids', [])
- goods_ids = ','.join(map(str, goods_ids)) if goods_ids else None
- prize_price = item.get('reference_price')
- prize_inventory = item.get('inventory') # remaining inventory
- data_dict = {
- "reward_pool_id": reward_pool_id,
- "session_id": session_id, # int
- "prize_id": prize_id,
- "prize_name": prize_name,
- "level_nickname": level_nickname,
- "parameter": parameter,
- "prize_image": prize_image,
- "prize_num": prize_num,
- "goods_ids": goods_ids,
- "prize_price": prize_price,
- "prize_inventory": prize_inventory,
- "crawl_category": crawl_category
- }
- # print(data_dict)
- info_list.append(data_dict)
- sql_pool.insert_many(table="feishezhang_reward_detail_record", data_list=info_list, ignore=True)
- else:
- log.warning(resp_json['msg'])
- @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
- def get_reward_order_single_page(log, reward_pool_id, session_id, page, sql_pool):
- """
- 获取 赏池 订单列表
- :param log: 日志对象
- :param reward_pool_id: 赏池id
- :param session_id: 订单id
- :param page: 页码
- :param sql_pool: 数据库连接池对象
- :return: len(data_list) -> 获取数据长度
- """
- log.debug(f"{inspect.currentframe().f_code.co_name} 开始获取第{page}页数据, session_id:{session_id}")
- url = "https://kurabu.feishezhang.com/api.php"
- params = {
- "s": "reward/MiniappGetCurrentSessionDrawRecords",
- "system_type": "default",
- "application": "app",
- "application_client_type": "weixin",
- "token": token,
- "uuid": "227de1d4-c429-4e73-b972-42c26560ed83",
- "ajax": "ajax",
- # "session_id": "3065",
- "session_id": session_id,
- "record_page": str(page)
- # "record_page": "1"
- }
- response = requests.get(url, headers=headers, params=params)
- response.raise_for_status()
- resp_json = response.json()
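- # The code expects the records as a flat list directly under 'data' here
- # (unlike reward/index, which nests them under data.data).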
- if resp_json['code'] == 0:
- data_list = resp_json.get('data', [])
- if not data_list:
- log.debug(f"当前页数据为空,已结束获取数据,当前页数:{page}, session_id:{session_id}")
- return 0
- info_list = []
- for cdata in data_list:
- order_id = cdata.get('id')
- goods_id = cdata.get('goods_id')
- draw_time = cdata.get('draw_time')
- level_nickname = cdata.get('level_nickname')
- user_id = cdata.get('user_id')
- images = cdata.get('images')
- title = cdata.get('title')
- nickname = cdata.get('nickname')
- data_dict = {
- "reward_pool_id": reward_pool_id,
- "session_id": session_id,
- "order_id": order_id,
- "goods_id": goods_id,
- "draw_time": draw_time,
- "level_nickname": level_nickname,
- "user_id": user_id,
- "images": images,
- "title": title,
- "nickname": nickname,
- "crawl_category": crawl_category
- }
- # print(data_dict)
- info_list.append(data_dict)
- sql_pool.insert_many(table="feishezhang_reward_order_record", data_list=info_list, ignore=True)
- return len(data_list)
- else:
- log.warning(resp_json['msg'])
- return 0
- def get_reward_order_list(log, reward_pool_id, total_sessions, sql_pool):
- """
- 获取 赏池 订单列表
- :param log: 日志对象
- :param reward_pool_id: 赏池id
- :param total_sessions: 订单总箱数
- :param sql_pool: 数据库连接池对象
- """
- log.debug(f"{inspect.currentframe().f_code.co_name} 开始获取数据")
- session_id_list = get_all_boxes(log, reward_pool_id, total_sessions)
- for session_id in session_id_list:
- log.debug(f"{inspect.currentframe().f_code.co_name} 获取第{session_id}的订单数据")
- page = 1
- max_page = 1000
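- # The draw-record endpoint appears to page 20 records at a time, so a short page
- # ends this box (an assumption inferred from the check below); max_page is a safety cap.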
- while page <= max_page:
- try:
- len_data_list = get_reward_order_single_page(log, reward_pool_id, session_id, page, sql_pool)
- except Exception as e:
- log.error(e)
- len_data_list = 0
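- # Treat a persistent failure (all retries exhausted) as an empty page, which
- # stops pagination for this box and moves on to the next session_id.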
- if len_data_list < 20:
- log.debug(f"当前页数据不足20条,已结束获取数据,当前页数:{page}")
- break
- page += 1
- # time.sleep(random.uniform(1, 2))
- def get_all_boxes(log, reward_pool_id, total_sessions):
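- """
- Resolve the session (box) ids of a prize pool by querying each box number in turn.
- :param log: logger object
- :param reward_pool_id: prize pool id
- :param total_sessions: total number of boxes in the pool
- :return: de-duplicated list of session ids (order not preserved because of set())
- """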
- url = "https://kurabu.feishezhang.com/api.php"
- params = {
- "s": "reward/MiniappGetPreviousAvailableSession",
- "system_type": "default",
- "application": "app",
- "application_client_type": "weixin",
- # "token": "26eed48c80bad053717687daef3a8668",
- "token": token,
- "uuid": "0b2f5448-a8ca-4dbe-9bac-0c49a10d9873",
- "ajax": "ajax"
- }
- session_id_list = []
- for i in range(1, total_sessions + 1):
- log.debug(f"{inspect.currentframe().f_code.co_name} 获取reward_pool_id:{reward_pool_id}, 第{i}箱赏")
- data = {
- "reward_pool_id": reward_pool_id,
- # "reward_pool_id": "65dc0347-63b5-41de-8bb8-811958a3b6c8",
- # "current_session_num": 2
- "current_session_num": i
- }
- response = requests.post(url, headers=headers, params=params, json=data)
- response.raise_for_status()
- resp_json = response.json()
- if resp_json['code'] == 0:
- session_id = resp_json.get('data', {}).get('id')
- session_id_list.append(session_id)
- else:
- log.warning(f"{resp_json['msg']}")
- session_id_list = list(set(session_id_list))
- # print(session_id_list)
- return session_id_list
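- # The retry wrapper below re-runs the whole job (up to 100 times, one hour apart)
- # only when fs_yifan_main raises, e.g. when the connection-pool health check fails;
- # a normal completion ends the tenacity retry loop.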
- @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
- def fs_yifan_main(log):
- """
- 主函数
- :param log: logger对象
- """
- log.info(
- f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
- # Set up the MySQL connection pool
- sql_pool = MySQLConnectionPool(log=log)
- if not sql_pool.check_pool_health():
- log.error("数据库连接池异常")
- raise RuntimeError("数据库连接池异常")
- try:
- # Fetch the prize-pool list data
- try:
- sql_reward_list = sql_pool.select_all(
- f"select distinct reward_pool_id from feishezhang_reward_list_record where crawl_category = '{crawl_category}'")
- sql_reward_list = [item[0] for item in sql_reward_list]
- get_reward_list(log, sql_pool, sql_reward_list)
- except Exception as e:
- log.error(f'get_reward_list -> error: {e}')
- # # Fetch the prize detail data (currently disabled)
- # sql_detail_list = sql_pool.select_all(
- # f"select id, reward_pool_id from feishezhang_reward_list_record where detail_state = 0 and crawl_category = '{crawl_category}'")
- # for sql_detail in sql_detail_list:
- # sql_id = sql_detail[0]
- # reward_pool_id = sql_detail[1]
- # log.info(f"开始处理数据:{reward_pool_id}")
- # try:
- # get_reward_detail(log, reward_pool_id, sql_pool)
- # sql_pool.update_one(
- # "update feishezhang_reward_list_record set detail_state = 1 where id = %s", (sql_id,))
- # except Exception as e:
- # log.error(f'get_reward_detail -> error: {e}')
- # sql_pool.update_one(
- # "update feishezhang_reward_list_record set detail_state = 2 where id = %s", (sql_id,))
- #
- # time.sleep(random.uniform(0.1, 1))
- # Fetch the order (draw record) list data
- sql_order_list = sql_pool.select_all(
- f"select reward_pool_id, total_sessions from feishezhang_reward_list_record where yi_order_state = 0 and crawl_category = '{crawl_category}'")
- for sql_order in sql_order_list:
- reward_pool_id = sql_order[0]
- total_sessions = sql_order[1]
- log.info(f"开始处理数据, reward_pool_id:{reward_pool_id}, total_sessions:{total_sessions}")
- try:
- get_reward_order_list(log, reward_pool_id, total_sessions, sql_pool)
- sql_pool.update_one(
- "update feishezhang_reward_list_record set yi_order_state = 1 where reward_pool_id = %s",
- (reward_pool_id,)
- )
- except Exception as e:
- log.error(f'get_reward_order_list -> error: {e}')
- sql_pool.update_one(
- "update feishezhang_reward_list_record set yi_order_state = 2 where reward_pool_id = %s",
- (reward_pool_id,)
- )
- except Exception as e:
- log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
- finally:
- log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')
- if __name__ == '__main__':
- # get_login()
- # get_reward_list(logger)
- # get_reward_detail(logger, "3cd3fa34-b0b7-4b1b-a593-411191fc745f", None)
- # get_reward_order_list(logger, "3cd3fa34-b0b7-4b1b-a593-411191fc745f", "3065", None)
- # get_all_boxes(logger, "c058b102-8ea6-48c2-9195-1216b010f579", 3)
- fs_yifan_main(logger)