# Standard library
from enum import Enum
from typing import Annotated

# Third-party
from fastapi import APIRouter, Depends, File, HTTPException, Path, UploadFile
from fastapi.concurrency import run_in_threadpool

# Local application
from app.services.card_service import CardInferenceService, card_service
from ..core.config import settings

# Router for the card-inference endpoints.
router = APIRouter()

# Build one enum member per configured model name so FastAPI renders the
# choice as a dropdown in the generated OpenAPI docs.
model_names = list(settings.CARD_MODELS_CONFIG.keys())
InferenceType = Enum("InferenceType", [(name, name) for name in model_names])
@router.post("/json_result")
async def card_json_result(
    inference_type: InferenceType,
    # Dependency injection unchanged: always resolves to the module-level
    # singleton service instance.
    service: CardInferenceService = Depends(lambda: card_service),
    file: UploadFile = File(...),
):
    """Run card inference on one uploaded image and return the JSON result.

    - **inference_type**: which configured model to use (selected from the
      dropdown generated from the models config).
    - **file**: the image file to upload.

    Raises HTTPException 400 on invalid input (``ValueError`` from the
    service) and 500 on any other failure.
    """
    image_bytes = await file.read()
    try:
        # predict() is blocking, so run it in a worker thread to keep the
        # event loop responsive. Pass the Enum member's string value, not
        # the member itself, since the service keys on plain strings.
        json_result = await run_in_threadpool(
            service.predict,
            inference_type=inference_type.value,
            image_bytes=image_bytes,
        )
        return json_result
    except ValueError as e:
        # Client-side problem (e.g. bad image / unknown model) -> 400.
        # Chain the cause so server logs keep the original traceback.
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        # Unexpected failure -> 500; detail message intentionally unchanged.
        raise HTTPException(status_code=500, detail=f"服务器内部错误: {e}") from e
|