popmart_lazada_spider.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/6/16 13:48
import random
import time
import inspect

import requests
import schedule
from loguru import logger
from mysql_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed

logger.remove()
logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
           format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
           level="DEBUG", retention="7 days")
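
# rotation="00:00" starts a new log file each day at midnight, and
# retention="7 days" prunes files older than a week (standard loguru sink options).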


def after_log(retry_state):
    """
    Retry callback for tenacity.

    :param retry_state: a RetryCallState object
    """
    # Prefer the logger passed as the wrapped function's first positional
    # argument; fall back to the module-level logger otherwise.
    if retry_state.args and len(retry_state.args) > 0:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(
            f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} failed")
    else:
        log.info(f"Function '{retry_state.fn.__name__}', attempt {retry_state.attempt_number} succeeded")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the tunnel-proxy mapping for requests.

    :return: proxies dict suitable for requests' proxies= argument
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise
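
# For reference, the mapping returned above has this shape (credentials
# elided; the endpoint is the Kuaidaili-style tunnel held in `tunnel`):
# {
#     "http": "http://<user>:<pwd>@x371.kdltps.com:15818/",
#     "https": "http://<user>:<pwd>@x371.kdltps.com:15818/",
# }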


@retry(stop=stop_after_attempt(10), wait=wait_fixed(2), after=after_log)
def get_resp_one_page(log, page):
    """
    Fetch one page of the POP MART official store listing as JSON.

    :param log: logger instance
    :param page: 1-based page number
    :return: parsed JSON response body
    """
    log.debug(f"Getting page {page}..............................")
    headers = {
        "accept": "application/json, text/plain, */*",
        "accept-language": "en,zh-CN;q=0.9,zh;q=0.8",
        "bx-v": "2.5.31",
        "priority": "u=1, i",
        "referer": "https://www.lazada.com.my/",
        "sec-ch-ua": "\"Google Chrome\";v=\"137\", \"Chromium\";v=\"137\", \"Not/A)Brand\";v=\"24\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36",
        # "x-csrf-token": "e4b1ee587f333"
    }
    # Session cookies captured from a browser; they are tied to one session
    # and will eventually expire.
    cookies = {
        "__wpkreporterwid_": "9a807029-4faf-408f-8741-5bd7a5c9bb49",
        "lazada_share_info": "1943013429_100_100_0_1943015429_null",
        "t_fv": "1750044788744",
        "t_uid": "IGNxirTJDPLVkegWMu0OjthsufRjWNgo",
        "cna": "dn7WIL8YbU0CAd+mqILgq5w2",
        "hng": "MY|en-MY|MYR|458",
        "hng.sig": "cLZ14_ZixioeDzk3FYgOHWtqfULyySre3d96Ouq-H6k",
        "EGG_SESS": "S_Gs1wHo9OvRHCMp98md7JBvEEnBpIGRATFScrAN0TSKROM9JS5ucnMrutmhJ6e0wTv_5ShoQDavnBd8GRhoufb8Sr_sUum68vg6woIDhiBw82Q8q_zFDAesYtMyDMb_pOyNXueCNJYEWgZDx1h2vQdEJsqeKN4AWpCQgGp-y5g=",
        "lwrid": "AgGXdtASjDNi5VqqCd6sX39uIxoL",
        "lzd_cid": "ab175d2a-6612-4a32-b759-475063ce8e55",
        "lzd_sid": "1e76f843e0504fb8ee3940a1b599e03d",
        "_tb_token_": "e4b1ee587f333",
        "xlly_s": "1",
        "lwrtk": "AAIEaFAB7osFX5Jf9a73Sg0oQN02mKk6aQXx1EKkYzrYCsXMq5K8D/I=",
        "_bl_uid": "g3m1Xbthy3wj3Uo1tqv3bhjhjdp9",
        "_gcl_au": "1.1.208579588.1750045133",
        "_ga": "GA1.3.1640854487.1750045147",
        "_gid": "GA1.3.661757065.1750045147",
        "AMCVS_126E248D54200F960A4C98C6%40AdobeOrg": "1",
        "AMCV_126E248D54200F960A4C98C6%40AdobeOrg": "-1124106680%7CMCIDTS%7C20256%7CMCMID%7C67086440579640317143819662540753251056%7CMCAAMLH-1750649953%7C11%7CMCAAMB-1750649953%7CRKhpRz8krg2tLO6pguXWp5olkAcUniQYPHaMWWgdJ3xzPWQmdj0y%7CMCOPTOUT-1750052353s%7CNONE%7CvVersion%7C5.2.0",
        "_m_h5_tk": "a76b9e5019c389f94d6cdde2ad6ed1fd_1750060971812",
        "_m_h5_tk_enc": "3cf92060e0c57a1f6a970599d58b33e5",
        "t_sid": "Wc3j5Z64n0pv3RftLZRHnRvF9SWuMFH0",
        "utm_channel": "NA",
        "_uetsid": "7514f0104a6311f0be7d6b89a4c1e673",
        "_uetvid": "75150b504a6311f0974d59c1d64da194",
        "isg": "BGRk0ZPK0JhEdSRUv65oxzGYNWJW_Yhn-5NlwX6BsC9nKQfzpgyY9rub64kxysC_",
        "_ga_6VT623WX3F": "GS2.3.s1750056508$o2$g1$t1750056548$j20$l0$h0",
        "cto_bundle": "6xFME19zVHhMJTJGSGFlQ2lqZXRVNEZuZFhzd2VZMkNCRU1rTHRSeEJyYXQ5UUglMkYyTHRUQkFWeiUyQlV1OEI0aXV2JTJGa0FUNHQ1UlBVVTdBWnJxbzFuTGFaUDY3SEZ0TjRqdmElMkZxR05yT2dNOXNjUWNRTFVnOVZ3dmVqenpyZzZ4WUUxRmlYcDZxSXgyN2NWJTJGNnlERm1JUHlYZjBKbVRjZnNSMXRUTUxDMXIlMkZ6NjlMJTJCczhJJTNE",
        "epssw": "9*mmCGJmjkWHDn9AvOutXURpqiQILO7tvzut2mZuTmu1HBO9JHdSZ3huBwdSa4dImm3tDm6umZPzy3AHysIiHmugXr0_mwvDom4MTI51KKu7zxCst0hjhSKSv92v2S2MqVWY3I23XQR26pVAufeJtQkKSyIfznmemTbDmm9LmOQiG3oIImuVuuaKmqtZKcKuWzBqSbyPfDarldaYPMun0GyDdPGXM_Zj5HpwuuxA0adYARJsm4miLR3AeYwqCVfjpFmmTMUAz71BGX80R14ITmrOPUGszCOak1FsVdYcXI6WY9HYJw3W3aURkH3fEXu3gbPS0PEmvPooIHqriXMYcCS_HAbI..",
        "tfstk": "fxSraJ4cruEPPNVS5Q-EgbZ5SG-JqhF6KMOBK9XHFQAoV0GhusCEd8FLyZRFis8HP4tlK6WpgUwJRBsVT151Fg_7yJSVKOAWd8KoeZC1I0s7yHie2HK315Z_YT6JvHxbgP3owEvwpkikE0Dz7sn415Z_czDDY7P1O3HMJ49piLmkxp24mdJ9xLvkx-RDLd3nZ6xHPxSm_5MWyoBwiNdHdyegZxRxxDju2UAZAIpv3irWyC6ldDiQpL8ysaA0Zf3V0EXp8MHKYhWlW6pGZbVyxtJN_d5zRvdPoOs5-Tr4s3sRENYhjW3h_35eSgYqKDWJYQx2nG2IWIs2N6jyoRiwRnj6S3bbk7TB4d5h2_kEx1jPcXp0O2sdzX0erKp21-y2Ah5F7W_O3o0KJEYM3CwJ-23prKp21-yqJ2L-jKR_eef.."
    }
    url = "https://www.lazada.com.my/pop-mart-official-store/"
    params = {
        "ajax": "true",
        "from": "wangpu",
        "page": str(page),
        "q": "All-Products"
    }
    response = requests.get(url, headers=headers, cookies=cookies, params=params,
                            proxies=get_proxys(log), timeout=10)
    # print(response.json())
    # print(response)
    response.raise_for_status()
    resp_json = response.json()
    return resp_json
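
# Usage sketch: one call fetches a single listing page through the tunnel
# proxy; tenacity retries the whole request up to 10 times with a fixed 2 s
# wait on any exception (including the HTTPError from raise_for_status()).
# resp_json = get_resp_one_page(logger, 1)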


def parse_data(log, resp_json, sql_pool):
    """
    Extract the product fields from one listing page and bulk-insert them
    into MySQL.
    """
    try:
        list_items = resp_json.get("mods", {}).get("listItems", [])
        if list_items:
            info_list = []
            for item in list_items:
                title = item.get("name")
                item_id = item.get("itemId")
                # Collect thumbnail URLs, skipping entries without an "image"
                # key so the join below never sees None.
                thumbs = item.get("thumbs") or []
                images_list = [thumb.get("image") for thumb in thumbs if thumb.get("image")]
                images = "|".join(images_list) if images_list else ""
                # print(images)
                if not images:
                    images = item.get("image")
                original_price_show = item.get("originalPriceShow")
                price_show = item.get("priceShow")
                review = item.get("review")
                location = item.get("location")
                # description = item.get("description", [])
                # description = "|".join(description)
                seller_name = item.get("sellerName")
                seller_id = item.get("sellerId")
                brand_name = item.get("brandName")
                brand_id = item.get("brandId")
                cheapest_sku = item.get("cheapest_sku")
                # categories = item.get("categories", [])
                # categories = "|".join(categories)
                item_sold_show = item.get("itemSoldCntShow")
                item_url = item.get("itemUrl")
                if item_url:
                    item_url = "https:" + item_url
                in_stock = item.get("inStock")
                info_dict = {
                    "title": title,
                    "item_id": item_id,
                    "images": images,
                    "original_price_show": original_price_show,
                    "price_show": price_show,
                    "review": review,
                    "location": location,
                    "seller_name": seller_name,
                    "seller_id": seller_id,
                    "brand_name": brand_name,
                    "brand_id": brand_id,
                    "cheapest_sku": cheapest_sku,
                    "item_sold_show": item_sold_show,
                    "item_url": item_url,
                    "in_stock": in_stock  # whether the item is in stock
                }
                # print(info_dict)
                info_list.append(info_dict)
            sql_pool.insert_many(table="popmart_lazada_record", data_list=info_list)
        else:
            log.info("No data found")
    except Exception as e:
        log.error(f"parse_data error: {e}")
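
# A minimal sketch of the payload shape parse_data expects, reconstructed from
# the keys read above (all field values here are hypothetical):
# {
#     "mods": {
#         "listItems": [
#             {
#                 "name": "...", "itemId": "...", "image": "...",
#                 "thumbs": [{"image": "..."}],
#                 "originalPriceShow": "...", "priceShow": "...",
#                 "review": "...", "location": "...",
#                 "sellerName": "...", "sellerId": "...",
#                 "brandName": "...", "brandId": "...",
#                 "cheapest_sku": "...", "itemSoldCntShow": "...",
#                 "itemUrl": "//www.lazada.com.my/products/...",
#                 "inStock": True
#             }
#         ]
#     }
# }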


@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def pop_lazada_main(log):
    """
    Main entry point for one spider run.

    :param log: logger instance
    """
    log.info(
        f'Starting the {inspect.currentframe().f_code.co_name} spider task....................................................')
    # Set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool.check_pool_health():
        log.error("Database connection pool is unhealthy")
        raise RuntimeError("Database connection pool is unhealthy")
    try:
        # First crawl: 9 pages in total
        for p in range(1, 10):
            try:
                resp_json = get_resp_one_page(log, p)
                parse_data(log, resp_json, sql_pool)
            except Exception as e:
                log.error(f"Request get_resp_one_page page: {p}, error: {e}")
            time.sleep(random.uniform(1, 2))
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Spider {inspect.currentframe().f_code.co_name} finished; waiting for the next collection round............')


def schedule_task():
    """
    Scheduler entry point for the spider module.
    """
    # Run the task once immediately
    # pop_lazada_main(log=logger)
    # Register the recurring job
    # schedule.every().day.at("00:01").do(pop_lazada_main, log=logger)
    # schedule.every(30).minutes.do(pop_lazada_main, log=logger)
    schedule.every(3).hours.do(pop_lazada_main, log=logger)
    while True:
        schedule.run_pending()
        time.sleep(1)
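
# Note: schedule runs jobs in the thread that calls run_pending(), so a slow
# pop_lazada_main invocation delays the next poll rather than overlapping it.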


if __name__ == '__main__':
    schedule_task()
    # json_str = test_dict.test_dict
    # parse_data(logger, json_str, None)
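
# mysql_pool is a local module that is not shown here. A minimal sketch of the
# interface this script relies on (the method names come from the calls above;
# everything beyond them is an assumption):
#
# class MySQLConnectionPool:
#     def __init__(self, log):
#         ...  # e.g. build a pooled connection via dbutils + pymysql
#
#     def check_pool_health(self) -> bool:
#         ...  # True when a connection can be checked out of the pool
#
#     def insert_many(self, table: str, data_list: list[dict]) -> None:
#         ...  # bulk INSERT dicts whose keys match the table's columns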