# mercari_jp_spider.py (14 KB)
  1. # -*- coding: utf-8 -*-
  2. # Author : Charley
  3. # Python : 3.10.8
  4. # Date : 2026/4/30 14:28
  5. import time
  6. import inspect
  7. import schedule
  8. import requests
  9. from loguru import logger
  10. from mysql_pool import MySQLConnectionPool
  11. from tenacity import retry, stop_after_attempt, wait_fixed
  12. """
  13. 请求网址:
  14. https://jp.mercari.com/search?category_id=82&page_token=v1%3A1&status=sold_out%7Ctrading
  15. """
  16. SEARCH_URL = "https://api.mercari.jp/v2/entities:search"
  17. PAGE_SIZE = 120
  18. LAPLACE_DEVICE_UUID = "a00429c5-ad26-4be4-83ae-60b7239e14d5"
  19. SEARCH_SESSION_ID = "cfba38acec8cae78136c62441bbb267a"
  20. LIST_DPOP = "eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYyMTksImp0aSI6IjQ0YmM4MzZlLWFiYWEtNDI1OC1hMjQ4LTNlNjkxMTUzZjY2NSIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvdjIvZW50aXRpZXM6c2VhcmNoIiwiaHRtIjoiUE9TVCIsInV1aWQiOiJhMDA0MjljNS1hZDI2LTRiZTQtODNhZS02MGI3MjM5ZTE0ZDUifQ.KqYWvIC42NYjNTewIfttuPMFHYAwJ4JZIXn4ulQye6s9c5zQutabWoOp8sKDjy-zvmbDCYA-6K7e7dW3bVu3cw"
  21. DETAIL_DPOP = "eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYwMDksImp0aSI6IjFmNGYwNDlhLTdmMGYtNGM0Zi1hZjcxLTIwYmFhZDhhMTc4NCIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvaXRlbXMvZ2V0IiwiaHRtIjoiR0VUIiwidXVpZCI6ImEwMDQyOWM1LWFkMjYtNGJlNC04M2FlLTYwYjcyMzllMTRkNSJ9._92fashFF1PmC0Ol0HFqz9rIYdzL-w_ZJwXXRTI3zX_8oNP_ziNUIwySB50Itgp88vsgy8skp4DZ2DTd3WBWnQ"
  22. logger.remove()
  23. logger.add("./logs/{time:YYYYMMDD}.log", encoding='utf-8', rotation="00:00",
  24. format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
  25. level="DEBUG", retention="7 day")
  26. def after_log(retry_state):
  27. """
  28. retry 回调
  29. :param retry_state: RetryCallState 对象
  30. """
  31. # 检查 args 是否存在且不为空
  32. if retry_state.args and len(retry_state.args) > 0:
  33. log = retry_state.args[0] # 获取传入的 logger
  34. else:
  35. log = logger # 使用全局 logger
  36. if retry_state.outcome.failed:
  37. log.warning(
  38. f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} Times")
  39. else:
  40. log.info(f"Function '{retry_state.fn.__name__}', Attempt {retry_state.attempt_number} succeeded")
  41. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  42. def get_proxys(log):
  43. """
  44. 获取代理
  45. :return: 代理
  46. """
  47. tunnel = "x371.kdltps.com:15818"
  48. kdl_username = "t13753103189895"
  49. kdl_password = "o0yefv6z"
  50. try:
  51. proxies = {
  52. "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
  53. "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel}
  54. }
  55. return proxies
  56. except Exception as e:
  57. log.error(f"Error getting proxy: {e}")
  58. raise e
  59. def build_headers() -> dict:
  60. """构造 Mercari 搜索接口请求头。"""
  61. return {
  62. "accept": "application/json, text/plain, */*",
  63. "accept-language": "ja",
  64. "content-type": "application/json",
  65. "dpop": LIST_DPOP,
  66. # "dpop": "eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYyMTksImp0aSI6IjQ0YmM4MzZlLWFiYWEtNDI1OC1hMjQ4LTNlNjkxMTUzZjY2NSIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvdjIvZW50aXRpZXM6c2VhcmNoIiwiaHRtIjoiUE9TVCIsInV1aWQiOiJhMDA0MjljNS1hZDI2LTRiZTQtODNhZS02MGI3MjM5ZTE0ZDUifQ.KqYWvIC42NYjNTewIfttuPMFHYAwJ4JZIXn4ulQye6s9c5zQutabWoOp8sKDjy-zvmbDCYA-6K7e7dW3bVu3cw",
  67. "origin": "https://jp.mercari.com",
  68. "priority": "u=1, i",
  69. "referer": "https://jp.mercari.com/",
  70. "sec-ch-ua": "\"Google Chrome\";v=\"147\", \"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"147\"",
  71. "sec-ch-ua-mobile": "?0",
  72. "sec-ch-ua-platform": "\"Windows\"",
  73. "sec-fetch-dest": "empty",
  74. "sec-fetch-mode": "cors",
  75. "sec-fetch-site": "cross-site",
  76. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/147.0.0.0 Safari/537.36",
  77. "x-country-code": "HK",
  78. "x-platform": "web",
  79. }
  80. def build_payload(page_token: str = "v1:0", category_id: int = 1289) -> dict:
  81. """构造单页请求参数,第一页 page_token 传 v1:0。"""
  82. return {
  83. "userId": "",
  84. "config": {
  85. "responseToggles": [
  86. "QUERY_SUGGESTION_WEB_1",
  87. ],
  88. },
  89. "pageSize": PAGE_SIZE,
  90. "pageToken": page_token,
  91. "searchSessionId": SEARCH_SESSION_ID,
  92. "source": "BaseSerp",
  93. "indexRouting": "INDEX_ROUTING_UNSPECIFIED",
  94. "thumbnailTypes": [],
  95. "searchCondition": {
  96. "keyword": "",
  97. "excludeKeyword": "",
  98. "sort": "SORT_SCORE",
  99. "order": "ORDER_DESC",
  100. "status": [
  101. "STATUS_SOLD_OUT",
  102. "STATUS_TRADING",
  103. ],
  104. "sizeId": [],
  105. "categoryId": [
  106. category_id,
  107. ],
  108. "brandId": [],
  109. "sellerId": [],
  110. "priceMin": 0,
  111. "priceMax": 0,
  112. "itemConditionId": [],
  113. "shippingPayerId": [],
  114. "shippingFromArea": [],
  115. "shippingMethod": [],
  116. "colorId": [],
  117. "hasCoupon": False,
  118. "attributes": [],
  119. "itemTypes": [],
  120. "skuIds": [],
  121. "shopIds": [],
  122. "excludeShippingMethodIds": [],
  123. },
  124. "serviceFrom": "suruga",
  125. "withItemBrand": True,
  126. "withItemSize": False,
  127. "withItemPromotions": True,
  128. "withItemSizes": True,
  129. "withShopname": False,
  130. "useDynamicAttribute": True,
  131. "withSuggestedItems": True,
  132. "withOfferPricePromotion": True,
  133. "withProductSuggest": True,
  134. "withParentProducts": False,
  135. "withProductArticles": True,
  136. "withSearchConditionId": False,
  137. "withAuction": True,
  138. "laplaceDeviceUuid": LAPLACE_DEVICE_UUID,
  139. }
  140. def build_page_token(page_number: int) -> str:
  141. """
  142. 把页码转换成接口 pageToken:第 1 页为 v1:0,第 2 页为 v1:1。
  143. :param page_number: 页码
  144. :return: pageToken
  145. """
  146. if page_number < 1:
  147. raise ValueError("page_number 必须从 1 开始")
  148. return f"v1:{page_number - 1}"
  149. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  150. def fetch_page(
  151. log,
  152. category_id: int,
  153. page_number: int,
  154. session: requests.Session | None = None,
  155. timeout: int = 22,
  156. ) -> requests.Response:
  157. """
  158. 请求单页数据。
  159. :param log: logger对象
  160. :param category_id: 类别ID
  161. :param page_number: 页码
  162. :param session: requests.Session对象
  163. :param timeout: 超时时间
  164. :return: requests.Response对象
  165. """
  166. log.info(f"请求第 {page_number} 页数据............")
  167. client = session or requests.Session()
  168. page_token = build_page_token(page_number)
  169. # print(page_token)
  170. response = client.post(
  171. SEARCH_URL,
  172. headers=build_headers(),
  173. json=build_payload(page_token=page_token, category_id=category_id),
  174. timeout=timeout
  175. )
  176. response.raise_for_status()
  177. return response
  178. @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
  179. def get_detail_page(log, pid):
  180. """
  181. 获取商品详情。
  182. :param log: logger对象
  183. :param pid: 商品ID
  184. """
  185. log.info(f"获取商品详情 {pid}............")
  186. headers = {
  187. "accept": "application/json, text/plain, */*",
  188. # "accept-language": "ja",
  189. "dpop": DETAIL_DPOP,
  190. # "referer": "https://jp.mercari.com/",
  191. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/147.0.0.0 Safari/537.36",
  192. "x-platform": "web"
  193. }
  194. url = "https://api.mercari.jp/items/get"
  195. params = {
  196. # "id": "m69042262006",
  197. "id": pid,
  198. "include_item_attributes": "true",
  199. "include_product_page_component": "true",
  200. "include_non_ui_item_attributes": "true",
  201. "include_donation": "true",
  202. "include_item_attributes_sections": "true",
  203. "include_auction": "true",
  204. "country_code": "JP"
  205. }
  206. response = requests.get(url, headers=headers, params=params, timeout=22)
  207. response.raise_for_status()
  208. resp_json = response.json()
  209. data = resp_json.get("data", {})
  210. tag_seller = data.get("seller", {})
  211. seller_id = tag_seller.get("id")
  212. seller_name = tag_seller.get("name")
  213. photos = data.get("photos", [])
  214. photos = ''.join(photos) if photos else None
  215. # print(seller_id, seller_name, photos)
  216. return seller_id, seller_name, photos
  217. def parse_list(log, resp_json, sql_pool, category_id, category_name):
  218. """
  219. 解析商品列表数据。
  220. :param log: logger对象
  221. :param resp_json: 响应的 JSON 数据
  222. :param sql_pool: MySQL连接池
  223. :param category_id: 类别ID
  224. :param category_name: 类别名称
  225. """
  226. items = resp_json.get("items", [])
  227. for item in items:
  228. pid = item.get("id")
  229. # sellerId = item.get("sellerId")
  230. status = item.get("status")
  231. product_name = item.get("name")
  232. price = item.get("price")
  233. created_at = item.get("created") # 1777512645 时间戳
  234. created_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(created_at))) if int(created_at) else None
  235. updated_at = item.get("updated") # 1777512645 时间戳
  236. updated_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(updated_at))) if int(updated_at) else None
  237. # thumbnails = item.get("thumbnails", [])
  238. # img = thumbnails[0] if thumbnails else None
  239. # categoryId = item.get("categoryId")
  240. # 获取详情页多图
  241. try:
  242. seller_id, seller_name, photos = get_detail_page(log, pid)
  243. except Exception as e:
  244. log.error(f"Error getting detail page: {e}")
  245. seller_id, seller_name, photos = None, None, None
  246. data_dict = {
  247. "pid": pid,
  248. "seller_id": seller_id,
  249. "seller_name": seller_name,
  250. "photos": photos,
  251. "status": status,
  252. "product_name": product_name,
  253. "price": price,
  254. "created_at": created_at,
  255. "updated_at": updated_at,
  256. "category_id": category_id,
  257. "category_name": category_name
  258. }
  259. # log.info(data_dict)
  260. sql_pool.insert_one_or_dict(table="mercari_record", data=data_dict, ignore=True)
  261. def iter_pages(
  262. log,
  263. sql_pool,
  264. category_id: int,
  265. category_name: str,
  266. start_page: int = 1,
  267. end_page: int = 15000,
  268. ):
  269. """
  270. 循环请求多页,返回页码和 Response。
  271. :param log: logger对象
  272. :param sql_pool: MySQL连接池
  273. :param category_id: 类别ID
  274. :param category_name: 类别名称
  275. :param start_page: 开始页码
  276. :param end_page: 结束页码
  277. """
  278. if category_id == 1289:
  279. start_page = 42
  280. if end_page < start_page:
  281. raise ValueError("end_page 必须大于等于 start_page")
  282. with requests.Session() as session:
  283. for page_number in range(start_page, end_page + 1):
  284. response = fetch_page(
  285. log=log,
  286. category_id=category_id,
  287. page_number=page_number,
  288. session=session,
  289. )
  290. # 解析 response
  291. resp_json = response.json()
  292. # print(resp_json)
  293. parse_list(log, resp_json, sql_pool, category_id, category_name)
  294. # 返回数据条数不固定 不能以120条为标准
  295. len_resp_json = len(resp_json.get("items", []))
  296. log.info(f"第 {page_number} 页返回 {len_resp_json} 个商品...................")
  297. if len_resp_json == 0:
  298. log.info(f">>>>>>>>>>>>>>>>>>>>> 第 {page_number} 页返回的商品数量为0,停止请求 <<<<<<<<<<<<<<<<<<<<<<")
  299. break
  300. # if page_number < end_page and sleep_seconds > 0:
  301. # time.sleep(sleep_seconds)
  302. @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
  303. def mercari_main(log):
  304. """
  305. 主函数
  306. :param log: logger对象
  307. """
  308. log.info(
  309. f'开始运行 {inspect.currentframe().f_code.co_name} 爬虫任务....................................................')
  310. # 配置 MySQL 连接池
  311. sql_pool = MySQLConnectionPool(log=log)
  312. if not sql_pool:
  313. log.error("MySQL数据库连接失败")
  314. raise Exception("MySQL数据库连接失败")
  315. # 抓取类别
  316. crawl_categories = [
  317. {"category_id": 1289, "category_name": "Pokemon"},
  318. {"category_id": 1409, "category_name": "One Piece"},
  319. {"category_id": 7290, "category_name": "Sports"}
  320. ]
  321. try:
  322. for category in crawl_categories:
  323. try:
  324. category_id = category["category_id"]
  325. category_name = category["category_name"]
  326. log.debug(f'开始爬取类别 {category_name}............')
  327. iter_pages(log, sql_pool, category_id, category_name, start_page=1, end_page=15000)
  328. except Exception as e:
  329. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  330. except Exception as e:
  331. log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
  332. finally:
  333. log.info(f'爬虫程序 {inspect.currentframe().f_code.co_name} 运行结束,等待下一轮的采集任务............')
# Disabled alternative entry point: run the spider daily at 05:00 via the
# `schedule` package (which is imported at the top of the file for this).
# def schedule_task():
#     """
#     Set up the scheduled task.
#     """
#     mercari_main(log=logger)
#
# schedule.every().day.at("05:00").do(mercari_main, log=logger)
# while True:
#     schedule.run_pending()
#     time.sleep(1)
if __name__ == "__main__":
    # Run the crawl once immediately with the module-level loguru logger.
    mercari_main(log=logger)
    # get_detail_page(logger, "m69042262006")