card_inference.py 1.6 KB

from fastapi import APIRouter, File, UploadFile, Depends, HTTPException
from fastapi.concurrency import run_in_threadpool
from enum import Enum
from ..core.config import settings
from app.services.card_service import CardInferenceService, card_service
import json
router = APIRouter()

# Build an Enum from the configured model names so FastAPI's docs UI
# renders the inference_type parameter as a dropdown of valid choices.
model_names = list(settings.CARD_MODELS_CONFIG.keys())
InferenceType = Enum("InferenceType", {name: name for name in model_names})
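# Illustration only (hypothetical config shape, not taken from this repo):
# if CARD_MODELS_CONFIG were {"id_card": {...}, "bank_card": {...}}, the
# line above would yield InferenceType.id_card and InferenceType.bank_card,
# and /docs would offer "id_card" and "bank_card" as the dropdown options.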

@router.post("/json_result")
async def card_json_result(
    inference_type: InferenceType,
    # Inject the shared service instance
    service: CardInferenceService = Depends(lambda: card_service),
    file: UploadFile = File(...),
):
    """
    Accept a card image, run inference with the model of the given type,
    and return the JSON result.

    - **inference_type**: the model type to use (chosen from the dropdown).
    - **file**: the image file to upload.
    """
    image_bytes = await file.read()
    try:
        # predict() is blocking, so run it in a worker thread; pass the
        # Enum's string value via .value.
        json_result = await run_in_threadpool(
            service.predict,
            inference_type=inference_type.value,
            image_bytes=image_bytes,
        )
        return json_result
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal server error: {e}")

@router.post("/mock_query")
async def mock_query(img_id: int):
    # Return canned data from a fixture file; img_id is accepted but not
    # used by the mock yet.
    with open("temp/test_return.json", "r", encoding="utf-8") as f:
        json_data = json.load(f)
    return json_data
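
# Minimal client sketch for the /json_result endpoint, kept out of the
# import path by the __main__ guard. Assumptions not confirmed by this
# file: the app mounts this router at the root of localhost:8000,
# "id_card" is one of the configured model names, a local card.jpg
# exists, and httpx is installed.
if __name__ == "__main__":
    import httpx

    with open("card.jpg", "rb") as f:
        resp = httpx.post(
            "http://localhost:8000/json_result",
            params={"inference_type": "id_card"},  # sent as a query parameter
            files={"file": ("card.jpg", f, "image/jpeg")},
        )
    print(resp.json())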