card_inference.py

from fastapi import APIRouter, Depends, File, HTTPException, UploadFile
from fastapi.concurrency import run_in_threadpool
from enum import Enum

from ..core.config import settings
from app.services.card_service import CardInferenceService, card_service

router = APIRouter()

# Build the inference-type Enum dynamically from the configured model names,
# so the OpenAPI docs render a dropdown of valid choices.
model_names = list(settings.CARD_MODELS_CONFIG.keys())
InferenceType = Enum("InferenceType", {name: name for name in model_names})
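
# For reference, a hypothetical shape of settings.CARD_MODELS_CONFIG (the real
# keys and fields live in the config module; these names are illustrative only):
#
#     CARD_MODELS_CONFIG = {
#         "id_card":   {"weights": "models/id_card.onnx"},
#         "bank_card": {"weights": "models/bank_card.onnx"},
#     }
#
# With two keys like these, InferenceType gets members "id_card" and
# "bank_card", each with value equal to its name.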
@router.post("/json_result")
async def card_json_result(
    inference_type: InferenceType,
    # Dependency injection stays unchanged
    service: CardInferenceService = Depends(lambda: card_service),
    file: UploadFile = File(...),
):
    """
    Accept a card image, run inference with the model of the specified type,
    and return the result as JSON.

    - **inference_type**: the model type to use (selected from the dropdown).
    - **file**: the image file to upload.
    """
    image_bytes = await file.read()
    try:
        # Pass the Enum's string value, not the Enum member itself
        json_result = await run_in_threadpool(
            service.predict,
            inference_type=inference_type.value,  # use .value
            image_bytes=image_bytes,
        )
        return json_result
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Internal server error: {e}")
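
# Minimal usage sketch (assumes the app factory mounts this router with
# prefix "/card"; the prefix, module path, and sample filename below are
# assumptions, not taken from this file):
#
#     from fastapi.testclient import TestClient
#     from app.main import app
#
#     client = TestClient(app)
#     with open("sample_card.jpg", "rb") as f:
#         resp = client.post(
#             "/card/json_result",
#             params={"inference_type": model_names[0]},
#             files={"file": ("sample_card.jpg", f, "image/jpeg")},
#         )
#     print(resp.status_code, resp.json())
#
# inference_type is a plain (non-File, non-Depends) parameter, so FastAPI
# reads it from the query string; the file arrives as multipart form data.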