|
@@ -92,7 +92,7 @@ def make_encrypted_post_request(log, url: str, request_data: dict, extra_headers
     encrypted_body = CryptoHelper.encrypt_request_data(request_data)
 
     # print(request_headers)
-    response = requests.post(url, headers=request_headers, json=encrypted_body, timeout=30)
+    response = requests.post(url, headers=request_headers, json=encrypted_body, timeout=22, proxies=get_proxys(log))
     # response.raise_for_status()
 
     if response.status_code == 200:
|
@@ -303,18 +303,12 @@ def parse_player_data(log, items, sql_pool):
     for item in items:
         # log.debug(f"Processing player item: {item}")
         pid = item.get('actId')
-        player_id = item.get('id')
-        order_id = item.get('orderId')
-        secret_name = item.get('secretName')
-        add_time = item.get('addTime')
+        give_number = item.get('giveNumber')  # 份数
         user_id = item.get('userId')
-        user_name = item.get('user_name')
+        user_name = item.get('userName')
         data_dict = {
             'pid': pid,
-            'player_id': player_id,
-            'order_id': order_id,
-            'secret_name': secret_name,
-            'add_time': add_time,
+            'give_number': give_number,
             'user_id': user_id,
             'user_name': user_name
         }
|
@@ -375,10 +369,11 @@ def get_sold_list(log, shop_id, token, sql_pool):
     :param sql_pool: MySQL连接池
     """
     page_num = 1
-    max_pages = 5
+    max_pages = 10
 
     while page_num <= max_pages:
         result = get_sold_single_page(log, shop_id, page_num)
+        # time.sleep(random.uniform(0.5, 1)) # 添加随机延迟,防止对目标服务器造成过大负载
         # print(result)
         if result is None:
             log.error(f"第 {page_num} 页请求失败,停止翻页")
|
@@ -408,7 +403,7 @@ def get_player_list(log, act_id, token, sql_pool):
     :return: has_data (True: 有数据, False: 无数据)
     """
    page_num = 1
-    max_pages = 1000
+    max_pages = 100
    has_data = False
 
    while page_num <= max_pages:
|
|
@@ -465,7 +460,6 @@ def zc_main(log):
 
     # 获取shop data
     try:
-        log.debug("开始爬取商户数据")
         get_shop_list(log, sql_pool)
     except Exception as e:
         log.error(f'iterate_shop_list error: {e}')
|
|
@@ -533,7 +527,7 @@ def schedule_task():
     # zc_main(log=logger)
 
     # 设置定时任务
-    schedule.every().day.at("01:00").do(zc_main, log=logger)
+    schedule.every().day.at("00:01").do(zc_main, log=logger)
 
     while True:
         schedule.run_pending()