# kw_spider.py
# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.8.10
# Date : 2025/4/14 11:22
import inspect
import time

import requests
import schedule
from tenacity import retry, stop_after_attempt, wait_fixed

from mysql_pool import MySQLConnectionPool
from settings import *  # expected to provide logger, make_request and after_log
from get_kw_sign import get_sign

baseUrl = "https://app.cardplayd.com/"
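
# `make_request` is pulled in through the star import from settings and is not
# defined in this file. The sketch below is only an inferred contract (method
# and URL in, parsed-JSON dict out, None on failure); the real helper in
# settings.py may add retries, proxies or encryption handling:
#
#     def make_request(log, method, url, **kwargs):
#         try:
#             resp = requests.request(method, url, timeout=10, **kwargs)
#             resp.raise_for_status()
#             return resp.json()
#         except Exception as e:
#             log.error(f"make_request {method} {url} failed: {e}")
#             return None
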

def parse_shop_list(log, sql_pool, data_info_list, sql_shop_list):
    """Parse one page of shops and bulk-insert the ones not yet stored."""
    data_list = []
    for data_info in data_info_list:
        if data_info is None:
            continue
        shop_id = data_info.get('shopId')
        if shop_id in sql_shop_list:
            log.debug(f'{inspect.currentframe().f_code.co_name} Shop {shop_id} already exists in the database.')
            continue
        data_dict = {
            "shop_id": shop_id,
            "shop_name": data_info.get('shopName'),
            "fans_num": data_info.get('fansCount'),
            "group_num": data_info.get('collageSuccessCount')
        }
        data_list.append(data_dict)
        sql_shop_list.append(shop_id)
    if data_list:
        sql_pool.insert_many(table='kawan_shop_record', data_list=data_list)
    else:
        log.debug(f'{inspect.currentframe().f_code.co_name} No new shop data found............')
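
# `MySQLConnectionPool` is defined in mysql_pool.py. The calls made throughout
# this file imply the interface sketched below; this is an inference from
# usage, not the actual implementation:
#
#     class MySQLConnectionPool:
#         def check_pool_health(self) -> bool: ...              # pool is usable
#         def select_one(self, sql) -> tuple: ...               # first row
#         def select_all(self, sql) -> list: ...                # all rows
#         def insert_many(self, table, data_list) -> None: ...  # bulk-insert dicts
#         def update_one(self, sql, args) -> None: ...          # parameterized UPDATE
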

def get_shop_one_page(log, headers, page_num=1):
    """Fetch one page (10 entries) of the shop list."""
    log.debug(f'{inspect.currentframe().f_code.co_name} Request page_num: {page_num}')
    url = "https://app.cardplayd.com/app/system/shop/queryShopList"
    nonce, timestamp, sign = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    params = {
        "pageNum": str(page_num),
        "pageSize": "10"
    }
    response = make_request(log, 'GET', url, params=params, headers=headers)
    return response
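
# `get_sign` (from get_kw_sign) produces the anti-crawl signing triple for a
# URL. Its contract is inferred from usage in this file, not from its source:
#
#     nonce, timestamp, sign = get_sign(url)
#
# `nonce` is a one-time string, `timestamp` the request time, and `sign` a
# signature over the request; all three are echoed back as request headers.
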

def get_shop_list(log, sql_pool, sql_shop_list, headers):
    """Page through the full shop list, storing shops that are new."""
    page_num = 1
    len_all_shops = 0
    while True:
        log.debug(f'{inspect.currentframe().f_code.co_name} Requesting.............')
        response_json = get_shop_one_page(log, headers, page_num)
        if response_json is None:
            log.error("Failed to fetch shop list. Exiting...")
            break
        data = response_json.get('data', {})
        data_info_list = data.get('dataInfo', [])
        len_all_shops += len(data_info_list)
        # Parse this page of shops
        parse_shop_list(log, sql_pool, data_info_list, sql_shop_list)
        total_shops = data.get('total', 0)
        if not data_info_list or len(data_info_list) < 10:
            log.info("No more shops found. Stopping requests.")
            break
        page_num += 1
        # Stop once the number of shops fetched reaches the reported total
        if len_all_shops >= total_shops:
            log.info("Total shops fetched. Stopping requests.")
            break
    log.info(f"Total shops fetched: {len_all_shops}")


# ----------------------------------------------------------------------------------------------------------------------

def get_acticity_xplain(log, product_id, headers):
    """Fetch the activity explanation (name and description) for a product."""
    url = f"https://app.cardplayd.com/app/system/cardCollage/queryCollageActivityExplainList/{product_id}"
    nonce, timestamp, sign = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    response = make_request(log, 'GET', url, headers=headers)
    try:
        json_data = response.get('data', [{}])
        if json_data:
            explain = json_data[0]
            explain_name = explain.get('explainName')
            explain_info = explain.get('explainInfo')
            return explain_name, explain_info
        else:
            log.warning(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}, Error: No data')
            return None, None
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}, Error: {e}')
        return None, None

def parse_sold_list(log, sql_pool, data_info_list, sql_product_id_list, headers, shop_name):
    """Parse one page of sold products and bulk-insert the ones not yet stored."""
    data_list = []
    for data_info in data_info_list:
        product_id = data_info.get('id')
        if product_id in sql_product_id_list:
            log.debug(f'{inspect.currentframe().f_code.co_name} Product {product_id} already exists in the database.')
            continue
        no = data_info.get('collageCode')
        title = data_info.get('collageName')
        img = data_info.get('rotationImagePath')
        price_sale = data_info.get('firstBuyPrice')
        original_price = data_info.get('unitPrice')  # original price
        total_price = data_info.get('totalPrice')
        sale_num = data_info.get('lotCount')
        play_way = data_info.get('playWay')
        spec_config = data_info.get('specifications')
        spec_config_count = data_info.get('count')
        sheets_number = data_info.get('sheetNumber')
        state = data_info.get('collageStatus')
        shop_id = data_info.get('shopId')
        category_id = data_info.get('cardCategoryId')
        on_sale_time = data_info.get('startTime')  # sale start time
        end_time = data_info.get('applyTime')  # scheduled completion time
        finish_time = data_info.get('endTime')  # actual completion time
        begin_live_time = data_info.get('beginLiveTime')  # live-stream start time
        live_complete_time = data_info.get('liveCompleteTime')  # live-stream completion time
        explain_name, explain_info = get_acticity_xplain(log, product_id, headers)  # activity explanation name and info
        video_url = data_info.get('liveBackPath')
        sold_data = {
            "product_id": product_id,
            "no": no,
            "title": title,
            "img": img,
            "price_sale": price_sale,
            "original_price": original_price,
            "total_price": total_price,
            "sale_num": sale_num,
            "play_way": play_way,
            "spec_config": spec_config,
            "spec_config_count": spec_config_count,
            "sheets_number": sheets_number,
            "state": state,
            "shop_id": shop_id,
            "shop_name": shop_name,
            "category_id": category_id,
            "on_sale_time": on_sale_time,
            "end_time": end_time,
            "finish_time": finish_time,
            "begin_live_time": begin_live_time,
            "live_complete_time": live_complete_time,
            "explain_name": explain_name,
            "explain_info": explain_info,
            "video_url": video_url
        }
        data_list.append(sold_data)
        sql_product_id_list.append(product_id)
    if data_list:
        sql_pool.insert_many(table="kawan_product_record", data_list=data_list)

def get_sold_one_page(log, shopId, headers, page_num=1):
    log.debug(f'{inspect.currentframe().f_code.co_name} Request page_num: {page_num}')
    url = "https://app.cardplayd.com/app/system/shopInfo/collageList"
    params = {
        "pageNum": str(page_num),
        "pageSize": "10",
        "status": "6",
        "timeLimit": "true",
        "shopId": shopId
    }
    nonce, timestamp, sign = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    response = make_request(log, 'GET', url, params=params, headers=headers)
    return response

def get_sold_list(log, sql_pool, shopId, shop_name, sql_product_id_list, headers):
    """Page through a shop's sold products and store the new ones."""
    page_num = 1
    len_all_sold = 0
    log.debug(f'{inspect.currentframe().f_code.co_name} Requesting with shopId: {shopId}.............')
    while True:
        response_json = get_sold_one_page(log, shopId, headers, page_num)
        if response_json is None:
            log.error("Failed to fetch sold list. Exiting...")
            break
        data = response_json.get('data', {})
        total_solds = response_json.get('total', 0)
        if total_solds == 0:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: total_solds == 0, shop_id:{shopId} has no sold data")
            break
        sold_info_list = data.get('dataInfo', [])
        if not sold_info_list:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: sold_info_list is empty, shop_id:{shopId} has no sold data")
            break
        len_all_sold += len(sold_info_list)
        # Parse this page of sold products
        parse_sold_list(log, sql_pool, sold_info_list, sql_product_id_list, headers, shop_name)
        if not sold_info_list or len(sold_info_list) < 10:
            log.info("No more sold_info_list found. Stopping requests.")
            break
        page_num += 1
        # Stop once the number fetched reaches the reported total
        if len_all_sold >= total_solds:
            log.info("Total sold_info_list fetched. Stopping requests.")
            break
    log.info(f"Total sold_info_list fetched: {len_all_sold}")


# ----------------------------------------------------------------------------------------------------------------------

def get_product_detail(log, product_id, headers):
    """Fetch full product detail. Currently unused; kept as a backup."""
    log.debug(f'{inspect.currentframe().f_code.co_name} Request product_id: {product_id}')
    url = "https://app.cardplayd.com/app/system/cardCollage/getCardCollageInfoById"
    params = {
        "collageId": product_id
    }
    nonce, timestamp, sign = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    response = requests.get(url, headers=headers, params=params)
    print(response.text)
    print(response)


# ----------------------------------------------------------------------------------------------------------------------

def get_player_list(log, cardCollageId):
    """Fetch the player list for a collage. Experimental; not yet wired into kawan_main."""
    log.debug(f'{inspect.currentframe().f_code.co_name} Request cardCollageId: {cardCollageId}')
    url = "https://app.cardplayd.com/app/system/cardReport/getCarmiPublicityVoByTypeList"
    player_headers = {
        "User-Agent": "Dart/3.6 (dart:io)",
        "Accept-Encoding": "gzip",
        "Content-Type": "application/json",
        "authorization": "Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpblR5cGUiOiJhcHBMb2dpbiIsImxvZ2luSWQiOiJhcHBfdXNlcjoxOTExNjI3MDU4NDk2ODM5NjgyIiwicm5TdHIiOiJoUTNtS2VwQ210RWZ4VFkzQVNIUTcxV1RRZlo1ajBkNCIsImNsaWVudGlkIjoiNDI4YTgzMTBjZDQ0Mjc1N2FlNjk5ZGY1ZDg5NGYwNTEiLCJ1c2VySWQiOjE5MTE2MjcwNTg0OTY4Mzk2ODIsImludml0ZUNvZGUiOiIyMDc3MTYiLCJwaG9uZSI6IjEzMDE0NjE3NjE0In0.IuWoS8kCmG4OQFh1XINJOHpbeKMZKlMmticVglAVF_Y",
        "content-language": "zh_CN",
        "app-version": "1.0.12",
        # "isencrypt": "true",
        # "isencrypt": "false",
    }
    data = {"type": "1", "cardCollageId": cardCollageId, "userAnonymous": None, "pageSize": 100, "pageNum": 1,
            "isAsc": "", "orderByColumn": "create_time", "filterInfo": ""}
    nonce, timestamp, sign = get_sign(url)
    player_headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    # The endpoint expects a JSON body, so send the dict with json= (data= would form-encode it)
    response = requests.post(url, headers=player_headers, json=data)
    print(response.text)
    print(response)


# ----------------------------------------------------------------------------------------------------------------------

def get_report_one_page(log, collageId, headers, page_num=1):
    url = "https://app.cardplayd.com/app/system/cardReport/getOpenReportInfo"
    params = {
        "cardCollageId": collageId,
        "pageSize": "10",
        "pageNum": str(page_num)
    }
    nonce, timestamp, sign = get_sign(url)
    headers.update(timestamp=timestamp, nonce=nonce, signature=sign)
    response = make_request(log, 'GET', url, headers=headers, params=params)
    return response

def parse_report_list(sql_pool, report_info_list, collageId):
    data_list = []
    for report_info in report_info_list:
        data = {
            "product_id": collageId,
            "card_name": report_info.get("carmiInfo"),
            "open_card_time": report_info.get("openCardTime"),
            "imgs": report_info.get("frontImagePath")
        }
        data_list.append(data)
    if data_list:
        sql_pool.insert_many(table="kawan_report_record", data_list=data_list)

def get_report_list(log, sql_pool, collageId, headers):
    """Page through a product's card-opening reports and store them."""
    page_num = 1
    len_all_report = 0
    while True:
        response_json = get_report_one_page(log, collageId, headers, page_num)
        if response_json is None:
            log.error("Failed to fetch report list. Exiting...")
            break
        data = response_json.get('data', {})
        total_reports = data.get('total', 0)
        if total_reports == 0:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: {response_json['msg']}, collageId:{collageId} has no report data")
            break
        report_info_list = data.get('otherCardReportResultList', [])
        if not report_info_list:
            log.warning(
                f"Warning {inspect.currentframe().f_code.co_name}: {response_json['msg']}, collageId:{collageId} has no report data")
            break
        len_all_report += len(report_info_list)
        parse_report_list(sql_pool, report_info_list, collageId)
        if not report_info_list or len(report_info_list) < 10:
            log.info("No more report_info_list found. Stopping requests.")
            break
        page_num += 1
        if len_all_report >= total_reports:
            log.info("Total report_info_list fetched. Stopping requests.")
            break
    log.info(f"Total report_info_list fetched: {len_all_report}")
    # Mark the product as fetched (report_state: 0 = pending, 1 = done, 2 = failed)
    sql_pool.update_one("update kawan_product_record set report_state = 1 where product_id = %s", (collageId,))
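
# `after_log` is expected from settings via the star import; tenacity's built-in
# after_log is a factory taking (logger, log_level), so a bare `after=after_log`
# implies a custom callback. tenacity calls it with a RetryCallState after every
# attempt; a minimal sketch of the assumed callback:
#
#     def after_log(retry_state):
#         logger.warning(f"kawan_main attempt {retry_state.attempt_number} ended; "
#                        f"retrying in 3600 s if it raised")
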
@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def kawan_main(log):
    """
    Main entry point for one full collection run.
    :param log: logger instance
    """
    log.info(
        f'Starting spider task {inspect.currentframe().f_code.co_name}....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        sql_token = sql_pool.select_one("SELECT token FROM kawan_token")
        headers = {
            "User-Agent": "Dart/3.6 (dart:io)",
            "Accept-Encoding": "gzip",
            "authorization": sql_token[0],
            "content-type": "application/json",
            "app-version": "1.0.12",
            "content-language": "zh_CN",
            "isencrypt": "false"
        }
        # 1. Fetch the shop list
        try:
            sql_shop_list = sql_pool.select_all("SELECT shop_id FROM kawan_shop_record")
            sql_shop_list = [item[0] for item in sql_shop_list]
            get_shop_list(log, sql_pool, sql_shop_list, headers)
            sql_shop_list.clear()
        except Exception as e:
            log.error(f"Error fetching shop list: {e}")
        time.sleep(5)
        # 2. Fetch each shop's sold products
        try:
            sql_shop_id_list = sql_pool.select_all("SELECT shop_id, shop_name FROM kawan_shop_record")
            # Known product_ids, used to skip products that are already stored
            sql_product_id_list = sql_pool.select_all("SELECT product_id FROM kawan_product_record")
            sql_product_id_list = [item[0] for item in sql_product_id_list]
            for shop_id, shop_name in sql_shop_id_list:
                log.info(f"Fetching sold products for shop: {shop_id}")
                try:
                    get_sold_list(log, sql_pool, shop_id, shop_name, sql_product_id_list, headers)
                except Exception as e:
                    log.error(f"Error fetching get_sold_list for shop_id:{shop_id}, {e}")
            sql_product_id_list.clear()
        except Exception as e:
            log.error(f"Error fetching sql_shop_id_list: {e}")
        time.sleep(5)
        # 3. Fetch card-opening reports for products not yet reported
        try:
            sql_product_id_list_for_report = sql_pool.select_all(
                "SELECT product_id FROM kawan_product_record WHERE report_state = 0")
            sql_product_id_list_for_report = [item[0] for item in sql_product_id_list_for_report]
            for product_id in sql_product_id_list_for_report:
                log.info(f"Fetching card-opening reports for product: {product_id}")
                try:
                    get_report_list(log, sql_pool, product_id, headers)
                except Exception as e:
                    log.error(f"Error fetching reports for product_id:{product_id}, {e}")
                    # Mark as failed so the product can be inspected or retried later
                    sql_pool.update_one("update kawan_product_record set report_state = 2 where product_id = %s",
                                        (product_id,))
        except Exception as e:
            log.error(f"Error fetching reports: {e}")
        # time.sleep(5)
        # 4. Fetch product players (not yet enabled)
        # try:
        #     get_player_list(log, sql_pool)
        # except Exception as e:
        #     log.error(f"Error fetching players: {e}")
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next collection run............')

def schedule_task():
    """
    Entry point for the spider module.
    """
    # Run the task once immediately (disabled):
    # kawan_main(log=logger)
    # Schedule the daily run
    schedule.every().day.at("00:01").do(kawan_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    # get_shop_list(logger, None)
    # get_sold_list(logger)
    # get_acticity_xplain(logger, '1910557299676192770')
    # get_product_detail(logger, '1910557299676192770')
    # get_player_list(logger)
    # get_report_list(logger, '1910557299676192770')
    # kawan_main(logger)
    schedule_task()