# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date   : 2026/4/30 14:28
import time
import inspect
import schedule
import requests
from loguru import logger
from mysql_pool import MySQLConnectionPool
from tenacity import retry, stop_after_attempt, wait_fixed

"""
Target URL:
https://jp.mercari.com/search?category_id=82&page_token=v1%3A1&status=sold_out%7Ctrading
"""

SEARCH_URL = "https://api.mercari.jp/v2/entities:search"
PAGE_SIZE = 120
LAPLACE_DEVICE_UUID = "a00429c5-ad26-4be4-83ae-60b7239e14d5"
SEARCH_SESSION_ID = "cfba38acec8cae78136c62441bbb267a"
LIST_DPOP = "eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYyMTksImp0aSI6IjQ0YmM4MzZlLWFiYWEtNDI1OC1hMjQ4LTNlNjkxMTUzZjY2NSIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvdjIvZW50aXRpZXM6c2VhcmNoIiwiaHRtIjoiUE9TVCIsInV1aWQiOiJhMDA0MjljNS1hZDI2LTRiZTQtODNhZS02MGI3MjM5ZTE0ZDUifQ.KqYWvIC42NYjNTewIfttuPMFHYAwJ4JZIXn4ulQye6s9c5zQutabWoOp8sKDjy-zvmbDCYA-6K7e7dW3bVu3cw"
DETAIL_DPOP = "eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYwMDksImp0aSI6IjFmNGYwNDlhLTdmMGYtNGM0Zi1hZjcxLTIwYmFhZDhhMTc4NCIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvaXRlbXMvZ2V0IiwiaHRtIjoiR0VUIiwidXVpZCI6ImEwMDQyOWM1LWFkMjYtNGJlNC04M2FlLTYwYjcyMzllMTRkNSJ9._92fashFF1PmC0Ol0HFqz9rIYdzL-w_ZJwXXRTI3zX_8oNP_ziNUIwySB50Itgp88vsgy8skp4DZ2DTd3WBWnQ"

logger.remove()
logger.add(
    "./logs/{time:YYYYMMDD}.log",
    encoding="utf-8",
    rotation="00:00",
    format="[{time:YYYY-MM-DD HH:mm:ss.SSS}] {level} {message}",
    level="DEBUG",
    retention="7 days",
)


def after_log(retry_state):
    """
    tenacity retry callback.
    :param retry_state: RetryCallState object
    """
    # Use the logger passed as the first positional argument if present,
    # otherwise fall back to the global logger.
    if retry_state.args:
        log = retry_state.args[0]
    else:
        log = logger
    if retry_state.outcome.failed:
        log.warning(f"Function '{retry_state.fn.__name__}' failed on attempt {retry_state.attempt_number}")
    else:
        log.info(f"Function '{retry_state.fn.__name__}' succeeded on attempt {retry_state.attempt_number}")


@retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log)
def get_proxys(log):
    """
    Build the tunnel proxy configuration.
    :return: proxies dict for requests
    """
    tunnel = "x371.kdltps.com:15818"
    kdl_username = "t13753103189895"
    kdl_password = "o0yefv6z"
    try:
        proxies = {
            "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
            "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": kdl_username, "pwd": kdl_password, "proxy": tunnel},
        }
        return proxies
    except Exception as e:
        log.error(f"Error getting proxy: {e}")
        raise e


def build_headers() -> dict:
    """Build request headers for the Mercari search API."""
    return {
        "accept": "application/json, text/plain, */*",
        "accept-language": "ja",
        "content-type": "application/json",
        "dpop": LIST_DPOP,
        "origin": "https://jp.mercari.com",
        "priority": "u=1, i",
        "referer": "https://jp.mercari.com/",
        "sec-ch-ua": "\"Google Chrome\";v=\"147\", \"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"147\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "cross-site",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/147.0.0.0 Safari/537.36",
        "x-country-code": "HK",
        "x-platform": "web",
    }
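
# NOTE: the "dpop" header above is a DPoP proof JWT. Its payload (decodable from the
# token itself) binds the proof to one endpoint and method via the "htu"/"htm" claims
# (entities:search + POST for LIST_DPOP, items/get + GET for DETAIL_DPOP) and carries
# an "iat" timestamp, so the hard-coded values may eventually be rejected and would
# then need to be re-captured from the browser's network panel.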
"eyJ0eXAiOiJkcG9wK2p3dCIsImFsZyI6IkVTMjU2IiwiandrIjp7ImNydiI6IlAtMjU2Iiwia3R5IjoiRUMiLCJ4IjoiajlJNmtMS2VrZFNOZEh5SHNhWmw1Z2tiYkZoRGFBUDNEd3N1dlZqQ3JXZyIsInkiOiJOTHREa2RkWVZhZkZ5a1FHYmsteDZBYUp6QWpVblZlcFl0X2pzdmV3cGdJIn19.eyJpYXQiOjE3NzgwNDYyMTksImp0aSI6IjQ0YmM4MzZlLWFiYWEtNDI1OC1hMjQ4LTNlNjkxMTUzZjY2NSIsImh0dSI6Imh0dHBzOi8vYXBpLm1lcmNhcmkuanAvdjIvZW50aXRpZXM6c2VhcmNoIiwiaHRtIjoiUE9TVCIsInV1aWQiOiJhMDA0MjljNS1hZDI2LTRiZTQtODNhZS02MGI3MjM5ZTE0ZDUifQ.KqYWvIC42NYjNTewIfttuPMFHYAwJ4JZIXn4ulQye6s9c5zQutabWoOp8sKDjy-zvmbDCYA-6K7e7dW3bVu3cw", "origin": "https://jp.mercari.com", "priority": "u=1, i", "referer": "https://jp.mercari.com/", "sec-ch-ua": "\"Google Chrome\";v=\"147\", \"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"147\"", "sec-ch-ua-mobile": "?0", "sec-ch-ua-platform": "\"Windows\"", "sec-fetch-dest": "empty", "sec-fetch-mode": "cors", "sec-fetch-site": "cross-site", "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/147.0.0.0 Safari/537.36", "x-country-code": "HK", "x-platform": "web", } def build_payload(page_token: str = "v1:0", category_id: int = 1289) -> dict: """构造单页请求参数,第一页 page_token 传 v1:0。""" return { "userId": "", "config": { "responseToggles": [ "QUERY_SUGGESTION_WEB_1", ], }, "pageSize": PAGE_SIZE, "pageToken": page_token, "searchSessionId": SEARCH_SESSION_ID, "source": "BaseSerp", "indexRouting": "INDEX_ROUTING_UNSPECIFIED", "thumbnailTypes": [], "searchCondition": { "keyword": "", "excludeKeyword": "", "sort": "SORT_SCORE", "order": "ORDER_DESC", "status": [ "STATUS_SOLD_OUT", "STATUS_TRADING", ], "sizeId": [], "categoryId": [ category_id, ], "brandId": [], "sellerId": [], "priceMin": 0, "priceMax": 0, "itemConditionId": [], "shippingPayerId": [], "shippingFromArea": [], "shippingMethod": [], "colorId": [], "hasCoupon": False, "attributes": [], "itemTypes": [], "skuIds": [], "shopIds": [], "excludeShippingMethodIds": [], }, "serviceFrom": "suruga", "withItemBrand": True, "withItemSize": False, "withItemPromotions": True, "withItemSizes": True, "withShopname": False, "useDynamicAttribute": True, "withSuggestedItems": True, "withOfferPricePromotion": True, "withProductSuggest": True, "withParentProducts": False, "withProductArticles": True, "withSearchConditionId": False, "withAuction": True, "laplaceDeviceUuid": LAPLACE_DEVICE_UUID, } def build_page_token(page_number: int) -> str: """ 把页码转换成接口 pageToken:第 1 页为 v1:0,第 2 页为 v1:1。 :param page_number: 页码 :return: pageToken """ if page_number < 1: raise ValueError("page_number 必须从 1 开始") return f"v1:{page_number - 1}" @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log) def fetch_page( log, category_id: int, page_number: int, session: requests.Session | None = None, timeout: int = 22, ) -> requests.Response: """ 请求单页数据。 :param log: logger对象 :param category_id: 类别ID :param page_number: 页码 :param session: requests.Session对象 :param timeout: 超时时间 :return: requests.Response对象 """ log.info(f"请求第 {page_number} 页数据............") client = session or requests.Session() page_token = build_page_token(page_number) # print(page_token) response = client.post( SEARCH_URL, headers=build_headers(), json=build_payload(page_token=page_token, category_id=category_id), timeout=timeout ) response.raise_for_status() return response @retry(stop=stop_after_attempt(5), wait=wait_fixed(1), after=after_log) def get_detail_page(log, pid): """ 获取商品详情。 :param log: logger对象 :param pid: 商品ID """ log.info(f"获取商品详情 {pid}............") headers = { "accept": "application/json, text/plain, */*", # 
"accept-language": "ja", "dpop": DETAIL_DPOP, # "referer": "https://jp.mercari.com/", "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/147.0.0.0 Safari/537.36", "x-platform": "web" } url = "https://api.mercari.jp/items/get" params = { # "id": "m69042262006", "id": pid, "include_item_attributes": "true", "include_product_page_component": "true", "include_non_ui_item_attributes": "true", "include_donation": "true", "include_item_attributes_sections": "true", "include_auction": "true", "country_code": "JP" } response = requests.get(url, headers=headers, params=params, timeout=22) response.raise_for_status() resp_json = response.json() data = resp_json.get("data", {}) tag_seller = data.get("seller", {}) seller_id = tag_seller.get("id") seller_name = tag_seller.get("name") photos = data.get("photos", []) photos = ''.join(photos) if photos else None # print(seller_id, seller_name, photos) return seller_id, seller_name, photos def parse_list(log, resp_json, sql_pool, category_id, category_name): """ 解析商品列表数据。 :param log: logger对象 :param resp_json: 响应的 JSON 数据 :param sql_pool: MySQL连接池 :param category_id: 类别ID :param category_name: 类别名称 """ items = resp_json.get("items", []) for item in items: pid = item.get("id") # sellerId = item.get("sellerId") status = item.get("status") product_name = item.get("name") price = item.get("price") created_at = item.get("created") # 1777512645 时间戳 created_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(created_at))) if int(created_at) else None updated_at = item.get("updated") # 1777512645 时间戳 updated_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(updated_at))) if int(updated_at) else None # thumbnails = item.get("thumbnails", []) # img = thumbnails[0] if thumbnails else None # categoryId = item.get("categoryId") # 获取详情页多图 try: seller_id, seller_name, photos = get_detail_page(log, pid) except Exception as e: log.error(f"Error getting detail page: {e}") seller_id, seller_name, photos = None, None, None data_dict = { "pid": pid, "seller_id": seller_id, "seller_name": seller_name, "photos": photos, "status": status, "product_name": product_name, "price": price, "created_at": created_at, "updated_at": updated_at, "category_id": category_id, "category_name": category_name } # log.info(data_dict) sql_pool.insert_one_or_dict(table="mercari_record", data=data_dict, ignore=True) def iter_pages( log, sql_pool, category_id: int, category_name: str, start_page: int = 1, end_page: int = 15000, ): """ 循环请求多页,返回页码和 Response。 :param log: logger对象 :param sql_pool: MySQL连接池 :param category_id: 类别ID :param category_name: 类别名称 :param start_page: 开始页码 :param end_page: 结束页码 """ if category_id == 1289: start_page = 42 if end_page < start_page: raise ValueError("end_page 必须大于等于 start_page") with requests.Session() as session: for page_number in range(start_page, end_page + 1): response = fetch_page( log=log, category_id=category_id, page_number=page_number, session=session, ) # 解析 response resp_json = response.json() # print(resp_json) parse_list(log, resp_json, sql_pool, category_id, category_name) # 返回数据条数不固定 不能以120条为标准 len_resp_json = len(resp_json.get("items", [])) log.info(f"第 {page_number} 页返回 {len_resp_json} 个商品...................") if len_resp_json == 0: log.info(f">>>>>>>>>>>>>>>>>>>>> 第 {page_number} 页返回的商品数量为0,停止请求 <<<<<<<<<<<<<<<<<<<<<<") break # if page_number < end_page and sleep_seconds > 0: # time.sleep(sleep_seconds) @retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log) 

@retry(stop=stop_after_attempt(100), wait=wait_fixed(3600), after=after_log)
def mercari_main(log):
    """
    Main entry point.
    :param log: logger
    """
    log.info(
        f'Starting crawler task {inspect.currentframe().f_code.co_name}....................................................')
    # set up the MySQL connection pool
    sql_pool = MySQLConnectionPool(log=log)
    if not sql_pool:
        log.error("Failed to connect to the MySQL database")
        raise Exception("Failed to connect to the MySQL database")
    # categories to crawl
    crawl_categories = [
        {"category_id": 1289, "category_name": "Pokemon"},
        {"category_id": 1409, "category_name": "One Piece"},
        {"category_id": 7290, "category_name": "Sports"},
    ]
    try:
        for category in crawl_categories:
            try:
                category_id = category["category_id"]
                category_name = category["category_name"]
                log.debug(f'Start crawling category {category_name}............')
                iter_pages(log, sql_pool, category_id, category_name, start_page=1, end_page=15000)
            except Exception as e:
                log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    except Exception as e:
        log.error(f'{inspect.currentframe().f_code.co_name} error: {e}')
    finally:
        log.info(f'Crawler {inspect.currentframe().f_code.co_name} finished, waiting for the next round............')


# def schedule_task():
#     """
#     Set up the scheduled task
#     """
#     mercari_main(log=logger)
#
#     schedule.every().day.at("05:00").do(mercari_main, log=logger)
#     while True:
#         schedule.run_pending()
#         time.sleep(1)


if __name__ == "__main__":
    mercari_main(log=logger)
    # get_detail_page(logger, "m69042262006")
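
# The target table `mercari_record` is not defined in this file. A schema along these
# lines would match the dict written by parse_list (column names come from data_dict,
# column types are assumptions):
#
#   CREATE TABLE IF NOT EXISTS mercari_record (
#       pid           VARCHAR(32) PRIMARY KEY,
#       seller_id     VARCHAR(32),
#       seller_name   VARCHAR(255),
#       photos        TEXT,
#       status        VARCHAR(32),
#       product_name  VARCHAR(512),
#       price         INT,
#       created_at    DATETIME,
#       updated_at    DATETIME,
#       category_id   INT,
#       category_name VARCHAR(64)
#   );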