"""FastAPI application factory and core infrastructure."""
import asyncio
import warnings
from contextlib import asynccontextmanager
from typing import AsyncGenerator
from fastapi import FastAPI
from pydantic import Field
from pydantic_settings import BaseSettings
from app.core.logging import logger
from app.core.dependencies import set_inference_service
from app.services.inference import ResNetInferenceService
from app.api.routes import prediction
class Settings(BaseSettings):
    """Application settings. Override via environment variables or .env file."""

    app_name: str = Field(default="ML Inference Service")
    app_version: str = Field(default="0.1.0")
    debug: bool = Field(default=False)
    host: str = Field(default="0.0.0.0")
    port: int = Field(default=8000)

    class Config:
        env_file = ".env"

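# Example overrides (illustrative values only; pydantic-settings matches each field
# to an environment variable of the same name, case-insensitively, set in the shell
# or in .env):
#   APP_NAME="My Inference Service"
#   DEBUG=true
#   PORT=9000
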
settings = Settings()


@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Application lifecycle: startup/shutdown."""
    logger.info("Starting ML Inference Service...")
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=FutureWarning)
            # Replace ResNetInferenceService with your own implementation
            service = ResNetInferenceService(model_name="microsoft/resnet-18")
            await asyncio.to_thread(service.load_model)
        set_inference_service(service)
        logger.info("Startup completed successfully")
    except Exception as e:
        logger.error(f"Startup failed: {e}")
        raise

    yield

    logger.info("Shutting down...")


def create_app() -> FastAPI:
    """Create and configure FastAPI application."""
    app = FastAPI(
        title=settings.app_name,
        description="ML inference service for image classification",
        version=settings.app_version,
        debug=settings.debug,
        lifespan=lifespan,
    )
    app.include_router(prediction.router)
    return app
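

# Usage sketch (assumes this module is importable as app.main; adjust the module
# path to match the actual repo layout):
#
#   uvicorn app.main:create_app --factory --host 0.0.0.0 --port 8000
#
# or programmatically:
#
#   import uvicorn
#   uvicorn.run(create_app(), host=settings.host, port=settings.port)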