zc_new_daily_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2026/2/27 11:22
import time
import inspect
import requests
import schedule
import user_agent
from loguru import logger
from crypto_utils import CryptoHelper
from mysql_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed
logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")
# Basic configuration
BASE_URL = "https://cashier.yqszpay.com"
PAGE_SIZE = 10
headers = {
    "User-Agent": user_agent.generate_user_agent(),
    "Connection": "Keep-Alive",
    "Accept-Encoding": "gzip",
    "Content-Type": "application/json",
    "channelNo": "88888888",
    "pageSize": str(PAGE_SIZE),
    # "pageNum": 1,
    "version": "1.9.9.82537"
}
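# Note: "channelNo", "pageSize" and "version" are custom header fields that the target API
# apparently expects on every request; the per-request "pageNum" header is supplied through
# extra_headers in the individual list calls below rather than being fixed here.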
def after_log(retry_state):
    """
    Retry callback.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the retried function's first argument, if any
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]  # logger passed into the retried function
    else:
        log = logger  # fall back to the global logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the proxy configuration.
    :param log: logger object
    :return: proxies dict
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e
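# Note: get_proxys() builds a tunnel-proxy dict but is never wired into a request in this
# file. If traffic should go through the tunnel, the requests.post call in
# make_encrypted_post_request would presumably need the proxies argument added, e.g.
#   requests.post(url, headers=request_headers, json=encrypted_body,
#                 proxies=get_proxys(log), timeout=30)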
@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def make_encrypted_post_request(log, url: str, request_data: dict, extra_headers: dict = None):
    """
    Generic encrypted POST request (with retries).
    :param log: logger object
    :param url: request URL
    :param request_data: request payload dict (will be encrypted)
    :param extra_headers: extra request headers
    :return: decrypted response data, or None on failure
    """
    request_headers = headers.copy()
    if extra_headers:
        request_headers.update(extra_headers)
    log.debug(f"Request URL: {url}, Data: {request_data}")
    encrypted_body = CryptoHelper.encrypt_request_data(request_data)
    # print(request_headers)
    response = requests.post(url, headers=request_headers, json=encrypted_body, timeout=30)
    # response.raise_for_status()
    if response.status_code == 200:
        response_json = response.json()
        # log.debug(f"Raw response: {response_json}")
        if 'data' in response_json:
            decrypted = CryptoHelper.decrypt_response_data(response_json)
            # log.debug(f"Decrypted response: {decrypted}")
            return decrypted
        return response_json
    else:
        log.error(f"Request failed: {response.status_code}, Response: {response.text}")
        return None
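# Request/response flow used above: the plaintext dict is encrypted with
# CryptoHelper.encrypt_request_data() and sent as the JSON body; when the JSON response
# contains a 'data' field it is handed to CryptoHelper.decrypt_response_data() and the
# decrypted dict (for the list endpoints below, something like {'rows': [...], 'total': N})
# is returned. The exact payload shapes depend on crypto_utils and the remote API.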
def get_shop_single_page(log, page_num, page_size=PAGE_SIZE):
    """
    Fetch one page of the merchant list.
    :param log: logger object
    :param page_num: page number
    :param page_size: rows per page
    """
    log.debug(f"Getting shop list, page: {page_num}")
    url = f"{BASE_URL}/zc-api/merchant/getMerMyList"
    request_data = {'pageNum': page_num, 'pageSize': page_size}
    try:
        resp = make_encrypted_post_request(log, url, request_data, extra_headers={"pageNum": str(page_num)})
    except Exception as e:
        log.error(f"Error getting shop list: {e}")
        resp = None
    return resp
def get_sold_single_page(log, mer_no, page_num):
    """
    Fetch one page of the product list.
    :param log: logger object
    :param mer_no: merchant number
    :param page_num: page number
    """
    log.info(f"Getting sold items for mer_no: {mer_no}, page: {page_num}")
    url = f"{BASE_URL}/zc-api/act/actProduct/getActList"
    request_data = {
        'merNo': mer_no,
        'pageNum': page_num,
        'pageSize': PAGE_SIZE,
        'queryType': 1
    }
    return make_encrypted_post_request(log, url, request_data, extra_headers={"pageNum": str(page_num)})
def get_player_single_page(log, act_id, token, page_num, page_size=PAGE_SIZE):
    """
    Fetch one page of the player list.
    :param log: logger object
    :param act_id: activity ID
    :param token: Authorization token
    :param page_num: page number
    :param page_size: rows per page
    """
    log.debug(f"Getting player list for act_id: {act_id}, page: {page_num}")
    url = f"{BASE_URL}/zc-api/act/actOrder/getActOrderPublicDetails"
    request_data = {'actId': act_id, 'pageNum': page_num, 'pageSize': page_size}
    return make_encrypted_post_request(
        log, url, request_data,
        extra_headers={"Authorization": token, "pageNum": str(page_num)}
    )
def parse_shop_data(log, items, sql_pool):
    """
    Parse merchant data and upsert it into the database.
    :param log: logger object
    :param items: merchant list
    :param sql_pool: MySQL connection pool
    """
    log.debug("Parsing shop data...........")
    info_list = []
    for item in items:
        # log.debug(f"Processing shop item: {item}")
        shop_id = item.get('merNo')
        shop_name = item.get('merName')
        sold_number = item.get('spell_number')
        # link_man = item.get('linkMan')
        # user_id = item.get('userId')
        fans = item.get('attentionNumber')
        data_dict = {
            'shop_id': shop_id,
            'shop_name': shop_name,
            'sold_number': sold_number,
            'fans': fans
        }
        log.debug(f"Parsed shop data: {data_dict}")
        info_list.append(data_dict)
    # Upsert keyed on shop_id: update the row if it already exists, insert it otherwise
    sql = "INSERT INTO zc_shop_record (shop_id, shop_name, sold_number, fans) VALUES (%s, %s, %s, %s) ON DUPLICATE KEY UPDATE shop_name=VALUES(shop_name), sold_number=VALUES(sold_number), fans=VALUES(fans)"
    # Convert the list of dicts into a list of tuples
    args_list = [tuple(d.values()) for d in info_list]
    sql_pool.insert_many(query=sql, args_list=args_list)
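# Note: the ON DUPLICATE KEY UPDATE upsert above only works if zc_shop_record has a PRIMARY
# or UNIQUE key on shop_id; that schema detail is assumed here, it is not visible in this file.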
@retry(stop=stop_after_attempt(3), wait=wait_fixed(1), after=after_log)
def get_video(log, token, pid):
    """
    Fetch the live/video info of an activity.
    :param log: logger object
    :param token: Authorization token
    :param pid: activity ID
    :return: (live_id, open_time, close_time, video_url)
    """
    url = f"{BASE_URL}/zc-api/live/actLive/getMerLiveInfo"
    request_data = {'actId': pid}
    log.debug(f"Getting video info, actId: {pid}")
    resp_data = make_encrypted_post_request(
        log, url, request_data,
        extra_headers={"Authorization": token}
    )
    # log.debug(f"Video response: {resp_data}")
    # Guard against a None/empty response so a failed request does not raise AttributeError
    live = (resp_data or {}).get('live') or {}
    live_id = live.get('liveId')
    live_open_time = live.get('openTime')
    live_close_time = live.get('closeTime')
    video_url = live.get('videoUrl')
    return live_id, live_open_time, live_close_time, video_url
def parse_sold_data(log, token, items, sql_pool):
    """
    Parse product data and save it.
    :param log: logger object
    :param token: Authorization token
    :param items: product list
    :param sql_pool: MySQL connection pool
    """
    info_list = []
    for item in items:
        # log.debug(f"Processing sold item: {item}")
        shop_id = item.get('merNo')  # merchant number
        pid = item.get('id')
        act_day = item.get('actDay')  # activity duration in days
        act_logo = item.get('actLogo')
        act_name = item.get('actName')  # activity name
        act_no = item.get('actNo')  # activity number
        act_status = item.get('actStatus')  # activity status
        startDate = item.get('startDate')  # start time
        endDate = item.get('endDate')  # end time
        storageId = item.get('storageId')  # storage ID
        storageName = item.get('storageName')  # storage name
        unitPrice = item.get('unitPrice')  # unit price
        sumPrice = item.get('sumPrice')  # total price
        reality_price = item.get('realityPrice')  # actual price
        packageNumber = item.get('packageNumber')  # package configuration
        schedule_ = item.get('schedule')  # stock
        live_id, live_open_time, live_close_time, video_url = get_video(log, token, pid)
        data_dict = {
            'shop_id': shop_id,
            'pid': pid,
            'act_day': act_day,
            'act_img': act_logo,
            'act_name': act_name,
            'act_no': act_no,
            'act_status': act_status,
            'start_date': startDate,
            'end_date': endDate,
            'storage_id': storageId,
            'storage_name': storageName,
            'unit_price': unitPrice,
            'sum_price': sumPrice,
            'reality_price': reality_price,
            'package_number': packageNumber,
            'schedule': schedule_,
            'live_id': live_id,
            'live_open_time': live_open_time,
            'live_close_time': live_close_time,
            'video_url': video_url
        }
        # log.debug(f"Parsed sold data: {data_dict}")
        # { 'live_close_time': None, 'video_url': None}
        info_list.append(data_dict)
    # Save the data
    sql_pool.insert_many(table='zc_product_record', data_list=info_list, ignore=True)
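# Note: insert_many(..., ignore=True) is assumed to build the column list from the dict keys
# and issue an INSERT IGNORE, so products that already exist in zc_product_record are skipped
# on re-crawls; the exact behaviour depends on the mysql_pool implementation.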
def parse_player_data(log, items, sql_pool):
    """
    Parse player data and save it.
    :param log: logger object
    :param items: player list
    :param sql_pool: MySQL connection pool
    """
    log.debug("Parsing player data...........")
    info_list = []
    for item in items:
        # log.debug(f"Processing player item: {item}")
        pid = item.get('actId')
        player_id = item.get('id')
        order_id = item.get('orderId')
        secret_name = item.get('secretName')
        add_time = item.get('addTime')
        user_id = item.get('userId')
        user_name = item.get('user_name')
        data_dict = {
            'pid': pid,
            'player_id': player_id,
            'order_id': order_id,
            'secret_name': secret_name,
            'add_time': add_time,
            'user_id': user_id,
            'user_name': user_name
        }
        # log.debug(f"Parsed player data: {data_dict}")
        info_list.append(data_dict)
    # Save the data
    sql_pool.insert_many(table='zc_player_record', data_list=info_list, ignore=True)
def get_shop_list(log, sql_pool):
    """
    Page through the merchant list.
    :param log: logger object
    :param sql_pool: MySQL connection pool
    """
    page_num = 1
    total = None
    while page_num <= 100:
        result = get_shop_single_page(log, page_num, PAGE_SIZE)
        # print(result)
        if result is None:
            log.error(f"Page {page_num} request failed, stopping pagination")
            break
        data_list = result.get('rows', [])
        parse_shop_data(log, data_list, sql_pool)
        # Read the total record count (available from the first page)
        if total is None and 'total' in result:
            total = result['total']
            log.info(f"Total records: {total}")
        # Stop when a page comes back empty
        if len(data_list) == 0:
            log.info(f"Page {page_num} has no data, stopping pagination")
            break
        # Stop once all records reported by total have been fetched
        if total is not None and page_num * PAGE_SIZE >= total:
            log.info("All records fetched, stopping pagination")
            break
        log.info(f"Page {page_num} done, rows on this page: {len(data_list)}")
        page_num += 1
def get_sold_list(log, shop_id, token, sql_pool):
    """
    Page through the product list of one merchant.
    :param log: logger object
    :param shop_id: shop_id
    :param token: Authorization token
    :param sql_pool: MySQL connection pool
    """
    page_num = 1
    max_pages = 5
    while page_num <= max_pages:
        result = get_sold_single_page(log, shop_id, page_num)
        # print(result)
        if result is None:
            log.error(f"Page {page_num} request failed, stopping pagination")
            break
        data_list = result.get('rows', [])
        parse_sold_data(log, token, data_list, sql_pool)
        # A short (or empty) page means there is nothing left to fetch
        if len(data_list) < PAGE_SIZE:
            log.info(f"Page {page_num} is not full, stopping pagination")
            break
        log.info(f"Page {page_num} done, rows on this page: {len(data_list)}")
        page_num += 1
def get_player_list(log, act_id, token, sql_pool):
    """
    Page through the player list of one activity.
    :param log: logger object
    :param act_id: activity ID
    :param token: Authorization token
    :param sql_pool: MySQL connection pool
    :return: has_data (True: data found, False: no data)
    """
    page_num = 1
    max_pages = 1000
    has_data = False
    while page_num <= max_pages:
        result = get_player_single_page(log, act_id, token, page_num)
        if result is None:
            log.error(f"Page {page_num} request failed, stopping pagination")
            break
        data_list = result.get('rows', [])
        # Only parse pages that actually contain rows
        if len(data_list) > 0:
            has_data = True
            parse_player_data(log, data_list, sql_pool)
        # A short (or empty) page means there is nothing left to fetch
        if len(data_list) < PAGE_SIZE:
            log.info(f"Page {page_num} is not full, stopping pagination")
            break
        log.info(f"Page {page_num} done, rows on this page: {len(data_list)}")
        page_num += 1
    return has_data
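# Pagination strategy shared by the three iterators above: keep requesting pages until one
# comes back with fewer than PAGE_SIZE rows (for merchants, also until 'total' is reached);
# max_pages acts as a hard cap so a misbehaving endpoint cannot keep the loop running forever.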
@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def zc_main(log):
    """
    Main entry point.
    :param log: logger object
    """
    log.info(
        f'Starting the {inspect.currentframe().f_code.co_name} spider task....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        # Fetch the token
        token_row = sql_pool.select_one("SELECT token FROM zc_token WHERE id = 1")
        if not token_row:
            log.error("No token found")
            return
        token = token_row[0]
        # player test
        # has_data = get_player_list(log, 1800, token, sql_pool)
        # Fetch shop data
        try:
            get_shop_list(log, sql_pool)
        except Exception as e:
            log.error(f'get_shop_list error: {e}')
        time.sleep(5)
        # Fetch sold data - iterate over all merchants
        try:
            # Query all merchant numbers from the shop table
            mer_no_rows = sql_pool.select_all("SELECT shop_id FROM zc_shop_record WHERE sold_number != 0")
            mer_no_list = [row[0] for row in mer_no_rows] if mer_no_rows else []
            log.info(f"Found {len(mer_no_list)} merchant numbers: {mer_no_list}")
            for shop_id in mer_no_list:
                log.info(f"Start crawling products for merchant {shop_id}")
                get_sold_list(log, shop_id, token, sql_pool)
        except Exception as e:
            log.error(f'get_sold_list error: {e}')
        time.sleep(5)
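        # player_state lifecycle (as used below): 0 = not yet crawled, 1 = crawl started /
        # data found, 2 = crawled but no players, 3 = crawl failed. Only rows with
        # player_state = 0 are picked up on each run.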
        # Fetch player data - iterate over all activities
        try:
            # Query all activity IDs from the product table
            act_id_rows = sql_pool.select_all("SELECT pid FROM zc_product_record WHERE player_state = 0")
            act_id_list = [row[0] for row in act_id_rows] if act_id_rows else []
            log.info(f"Found {len(act_id_list)} activity IDs")
            for act_id in act_id_list:
                try:
                    # Mark the current pid as 1 first, meaning the query has started
                    sql_pool.update_one("UPDATE zc_product_record SET player_state = 1 WHERE pid = %s", (act_id,))
                    log.info(f"Updated pid: {act_id} state to 1 (query started)")
                    log.info(f"Start crawling players for pid: {act_id}")
                    has_data = get_player_list(log, act_id, token, sql_pool)
                    # Update the state depending on whether any data came back
                    if has_data:
                        log.info(f"pid: {act_id} returned data, state stays 1")
                    else:
                        log.info(f"pid: {act_id} has no data, updating state to 2")
                        sql_pool.update_one("UPDATE zc_product_record SET player_state = 2 WHERE pid = %s", (act_id,))
                except Exception as pid_error:
                    # On failure, set the state to 3
                    log.error(f"pid: {act_id} query failed, error: {pid_error}")
                    try:
                        sql_pool.update_one("UPDATE zc_product_record SET player_state = 3 WHERE pid = %s", (act_id,))
                        log.info(f"Updated pid: {act_id} state to 3 (query error)")
                    except Exception as update_error:
                        log.error(f"Failed to update state for pid: {act_id}: {update_error}")
        except Exception as e:
            log.error(f'get_player_list error: {e}')
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished, waiting for the next collection run............')
def schedule_task():
    """
    Entry point for running the spider as a scheduled job.
    """
    # Run the task once immediately
    # zc_main(log=logger)
    # Set up the scheduled job
    schedule.every().day.at("00:01").do(zc_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)
if __name__ == '__main__':
    # zc_main(logger)
    schedule_task()
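# Run modes: calling zc_main(logger) directly performs a single one-off crawl, while
# schedule_task() blocks forever and re-runs the crawl every day at 00:01 local time.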