| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146 |
- """
- Models Router - Proxy to Model Service
- """
- from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
- from fastapi.responses import JSONResponse
- from typing import List, Optional
- import httpx
- import asyncio
- from shared.config import settings
- from shared.auth import get_current_user
- from shared.models.user import User
- from shared.models.model import ModelRequest, ModelResponse, ModelList
router = APIRouter()

# Base URL of the internal model service (Docker service name + port).
# Was an f-string with no placeholders (ruff F541) — plain literal instead.
# NOTE(review): hard-coded even though `settings` is imported above;
# presumably this should be read from configuration — confirm and wire through.
MODEL_SERVICE_URL = "http://model-service:8001"
@router.get("/models", response_model=List[ModelList])
async def list_models(
    current_user: User = Depends(get_current_user),
    skip: int = 0,
    limit: int = 100
):
    """List available models.

    Proxies ``GET /models`` on the model service, forwarding the caller's
    bearer token and the pagination parameters unchanged.

    Args:
        current_user: Authenticated user (injected); its token is forwarded.
        skip: Pagination offset passed through to the model service.
        limit: Maximum number of models to return.

    Raises:
        HTTPException: With the upstream status code when the model service
            rejects the request, or 502 when it is unreachable.
    """
    try:
        # Bounded timeout so a stuck model service cannot hang this worker.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(
                f"{MODEL_SERVICE_URL}/models",
                params={"skip": skip, "limit": limit},
                headers={"Authorization": f"Bearer {current_user.token}"}
            )
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        # Forward the upstream status (e.g. 403/404) instead of masking it as 500.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Model service error: {str(e)}"
        ) from e
    except httpx.HTTPError as e:
        # Transport-level failure (connect error, timeout): bad gateway.
        raise HTTPException(
            status_code=502, detail=f"Model service error: {str(e)}"
        ) from e
@router.post("/models/{model_id}/predict", response_model=ModelResponse)
async def predict(
    model_id: str,
    request: ModelRequest,
    background_tasks: BackgroundTasks,
    current_user: User = Depends(get_current_user)
):
    """Make a prediction with the specified model.

    Proxies ``POST /models/{model_id}/predict`` on the model service and,
    on success, schedules a background task to log the request.

    Args:
        model_id: Identifier of the model to run.
        request: Prediction payload forwarded as JSON.
        background_tasks: FastAPI task queue for post-response logging.
        current_user: Authenticated user (injected); its token is forwarded.

    Raises:
        HTTPException: With the upstream status code when the model service
            rejects the request, or 502 when it is unreachable.
    """
    try:
        # Bounded timeout so a stuck model service cannot hang this worker.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{MODEL_SERVICE_URL}/models/{model_id}/predict",
                json=request.dict(),
                headers={"Authorization": f"Bearer {current_user.token}"}
            )
            response.raise_for_status()

            # Log only successful predictions; runs after the response is sent.
            background_tasks.add_task(
                log_prediction_request,
                model_id,
                current_user.id,
                request
            )

            return response.json()
    except httpx.HTTPStatusError as e:
        # Forward the upstream status (e.g. 403/404) instead of masking it as 500.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Model service error: {str(e)}"
        ) from e
    except httpx.HTTPError as e:
        # Transport-level failure (connect error, timeout): bad gateway.
        raise HTTPException(
            status_code=502, detail=f"Model service error: {str(e)}"
        ) from e
@router.get("/models/{model_id}", response_model=ModelList)
async def get_model(
    model_id: str,
    current_user: User = Depends(get_current_user)
):
    """Get details for a single model.

    Proxies ``GET /models/{model_id}`` on the model service.

    Args:
        model_id: Identifier of the model to fetch.
        current_user: Authenticated user (injected); its token is forwarded.

    Raises:
        HTTPException: With the upstream status code when the model service
            rejects the request (a missing model now surfaces as 404, not
            500), or 502 when the service is unreachable.
    """
    try:
        # Bounded timeout so a stuck model service cannot hang this worker.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(
                f"{MODEL_SERVICE_URL}/models/{model_id}",
                headers={"Authorization": f"Bearer {current_user.token}"}
            )
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        # Forward the upstream status instead of masking it as 500.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Model service error: {str(e)}"
        ) from e
    except httpx.HTTPError as e:
        # Transport-level failure (connect error, timeout): bad gateway.
        raise HTTPException(
            status_code=502, detail=f"Model service error: {str(e)}"
        ) from e
@router.post("/models/{model_id}/train")
async def train_model(
    model_id: str,
    training_data: dict,
    background_tasks: BackgroundTasks,
    current_user: User = Depends(get_current_user)
):
    """Start training for the specified model.

    Proxies ``POST /models/{model_id}/train`` on the model service and,
    on success, schedules a background task to log the training request.

    Args:
        model_id: Identifier of the model to train.
        training_data: Training payload forwarded as JSON.
        background_tasks: FastAPI task queue for post-response logging.
        current_user: Authenticated user (injected); its token is forwarded.

    Raises:
        HTTPException: With the upstream status code when the model service
            rejects the request, or 502 when it is unreachable.
    """
    try:
        # Bounded timeout so a stuck model service cannot hang this worker.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.post(
                f"{MODEL_SERVICE_URL}/models/{model_id}/train",
                json=training_data,
                headers={"Authorization": f"Bearer {current_user.token}"}
            )
            response.raise_for_status()

            # Log only accepted training requests; runs after the response is sent.
            background_tasks.add_task(
                log_training_request,
                model_id,
                current_user.id,
                training_data
            )

            return response.json()
    except httpx.HTTPStatusError as e:
        # Forward the upstream status (e.g. 403/404) instead of masking it as 500.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Model service error: {str(e)}"
        ) from e
    except httpx.HTTPError as e:
        # Transport-level failure (connect error, timeout): bad gateway.
        raise HTTPException(
            status_code=502, detail=f"Model service error: {str(e)}"
        ) from e
@router.get("/models/{model_id}/status")
async def get_model_status(
    model_id: str,
    current_user: User = Depends(get_current_user)
):
    """Get the training status of a model.

    Proxies ``GET /models/{model_id}/status`` on the model service.

    Args:
        model_id: Identifier of the model whose status to fetch.
        current_user: Authenticated user (injected); its token is forwarded.

    Raises:
        HTTPException: With the upstream status code when the model service
            rejects the request, or 502 when it is unreachable.
    """
    try:
        # Bounded timeout so a stuck model service cannot hang this worker.
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(
                f"{MODEL_SERVICE_URL}/models/{model_id}/status",
                headers={"Authorization": f"Bearer {current_user.token}"}
            )
            response.raise_for_status()
            return response.json()
    except httpx.HTTPStatusError as e:
        # Forward the upstream status instead of masking it as 500.
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Model service error: {str(e)}"
        ) from e
    except httpx.HTTPError as e:
        # Transport-level failure (connect error, timeout): bad gateway.
        raise HTTPException(
            status_code=502, detail=f"Model service error: {str(e)}"
        ) from e
async def log_prediction_request(model_id: str, user_id: str, request: ModelRequest):
    """Record a prediction call for analytics.

    Placeholder: a real implementation would persist
    (model_id, user_id, request) to a database or analytics service.
    """
    # Intentionally a no-op for now.
    return None
async def log_training_request(model_id: str, user_id: str, training_data: dict):
    """Record a training request for analytics.

    Placeholder: a real implementation would persist
    (model_id, user_id, training_data) to a database or analytics service.
    """
    # Intentionally a no-op for now.
    return None
|