Merge branch 'master' into 'main'
initial commit
See merge request ul-dsri/sandbox/sachin-sharma-in/ml-inference-service!1
- .python-version +1 -0
- app/__init__.py +7 -0
- app/api/__init__.py +1 -0
- app/api/controllers.py +75 -0
- app/api/models.py +24 -0
- app/api/routes/__init__.py +1 -0
- app/api/routes/prediction.py +20 -0
- app/api/routes/resnet_service_manager.py +19 -0
- app/core/__init__.py +1 -0
- app/core/app.py +25 -0
- app/core/config.py +30 -0
- app/core/dependencies.py +19 -0
- app/core/lifespan.py +43 -0
- app/core/logging.py +49 -0
- app/services/__init__.py +1 -0
- app/services/inference.py +152 -0
- main.py +6 -0
- requirements.in +16 -0
- requirements.txt +161 -0
- scripts/model_download.bash +8 -0
- test_main.http +11 -0
.python-version
ADDED
@@ -0,0 +1 @@
+3.12.11
app/__init__.py
ADDED
@@ -0,0 +1,7 @@
+"""
+ML Inference Service
+
+A FastAPI-based web service for machine learning model inference.
+"""
+
+__version__ = "0.1.0"
app/api/__init__.py
ADDED
@@ -0,0 +1 @@
+"""API layer for the ML inference service."""
app/api/controllers.py
ADDED
@@ -0,0 +1,75 @@
+"""
+Controllers for handling API business logic.
+"""
+import base64
+import io
+
+from fastapi import HTTPException
+from PIL import Image
+
+from app.core.logging import logger
+from app.services.inference import ResNetInferenceService
+from app.api.models import ImageRequest, PredictionResponse
+
+
+class PredictionController:
+    """Controller for ML prediction endpoints."""
+
+    @staticmethod
+    async def predict_resnet(
+        request: ImageRequest,
+        resnet_service: ResNetInferenceService
+    ) -> PredictionResponse:
+        """
+        Classify an image using ResNet-18 from base64 encoded data.
+        """
+        try:
+            # Validate service availability
+            if not resnet_service:
+                raise HTTPException(
+                    status_code=503,
+                    detail="Service not initialized"
+                )
+
+            # Validate media type
+            if not request.image.mediaType.startswith('image/'):
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid media type: {request.image.mediaType}"
+                )
+
+            # Decode base64 image data
+            try:
+                image_data = base64.b64decode(request.image.data)
+            except Exception as decode_error:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid base64 data: {str(decode_error)}"
+                )
+
+            # Load and validate image
+            try:
+                image = Image.open(io.BytesIO(image_data))
+            except Exception as img_error:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid image file: {str(img_error)}"
+                )
+
+            # Perform prediction
+            result = resnet_service.predict(image)
+
+            # Return structured response
+            return PredictionResponse(
+                prediction=result["prediction"],
+                confidence=result["confidence"],
+                model=result["model"],
+                predicted_label=result["predicted_label"],
+                mediaType=request.image.mediaType
+            )
+
+        except HTTPException:
+            raise
+        except Exception as e:
+            logger.error(f"Prediction failed: {e}")
+            raise HTTPException(status_code=500, detail=str(e))
app/api/models.py
ADDED
@@ -0,0 +1,24 @@
+"""
+Pydantic models for request/response validation.
+"""
+from pydantic import BaseModel
+
+
+class ImageData(BaseModel):
+    """Image data model for base64 encoded images."""
+    mediaType: str
+    data: str
+
+
+class ImageRequest(BaseModel):
+    """Request model for image classification."""
+    image: ImageData
+
+
+class PredictionResponse(BaseModel):
+    """Response model for image classification results."""
+    prediction: str
+    confidence: float
+    model: str
+    predicted_label: int
+    mediaType: str
app/api/routes/__init__.py
ADDED
@@ -0,0 +1 @@
+"""API route definitions."""
app/api/routes/prediction.py
ADDED
@@ -0,0 +1,20 @@
+"""
+ML Prediction routes.
+"""
+from fastapi import APIRouter, Depends
+
+from app.api.controllers import PredictionController
+from app.api.models import ImageRequest, PredictionResponse
+from app.core.dependencies import get_resnet_service
+from app.services.inference import ResNetInferenceService
+
+router = APIRouter()
+
+
+@router.post("/predict/resnet", response_model=PredictionResponse)
+async def predict_image(
+    request: ImageRequest,
+    resnet_service: ResNetInferenceService = Depends(get_resnet_service)
+):
+    """Classify an image using ResNet-18 from base64 encoded data."""
+    return await PredictionController.predict_resnet(request, resnet_service)
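
For reviewers who want to exercise the new endpoint, here is a minimal client sketch. It assumes the service is already running on 127.0.0.1:8000 (the defaults in app/core/config.py) and that a local file named `example.jpg` exists; both are illustrative, not part of this MR. `requests` is already pinned transitively in requirements.txt.

```python
import base64

import requests  # already present transitively in requirements.txt

# Read and base64-encode a local image (hypothetical file name).
with open("example.jpg", "rb") as f:
    encoded = base64.b64encode(f.read()).decode("utf-8")

# Payload shape matches ImageRequest / ImageData in app/api/models.py.
payload = {"image": {"mediaType": "image/jpeg", "data": encoded}}

resp = requests.post("http://127.0.0.1:8000/predict/resnet", json=payload, timeout=60)
resp.raise_for_status()
# PredictionResponse fields: prediction, confidence, model, predicted_label, mediaType
print(resp.json())
```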
app/api/routes/resnet_service_manager.py
ADDED
@@ -0,0 +1,19 @@
+# """
+# Dependency injection for FastAPI.
+# """
+# from typing import Optional
+# from app.services.inference import ResNetInferenceService
+#
+# # Global service instance
+# _resnet_service: Optional[ResNetInferenceService] = None
+#
+#
+# def get_resnet_service() -> Optional[ResNetInferenceService]:
+#     """Get the ResNet service instance."""
+#     return _resnet_service
+#
+#
+# def set_resnet_service(service: ResNetInferenceService) -> None:
+#     """Set the global ResNet service instance."""
+#     global _resnet_service
+#     _resnet_service = service
app/core/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Core utilities and configurations."""
app/core/app.py
ADDED
@@ -0,0 +1,25 @@
+"""
+FastAPI application factory.
+"""
+from fastapi import FastAPI
+
+from app.core.config import settings
+from app.core.lifespan import lifespan
+from app.api.routes import prediction
+
+
+def create_app() -> FastAPI:
+    """Application factory."""
+
+    app = FastAPI(
+        title=settings.app_name,
+        description="ML inference service for image classification",
+        version=settings.app_version,
+        debug=settings.debug,
+        lifespan=lifespan
+    )
+
+    # Include only prediction router
+    app.include_router(prediction.router)
+
+    return app
app/core/config.py
ADDED
@@ -0,0 +1,30 @@
+"""
+Basic configuration management.
+
+Starting simple - just app settings. We'll expand as needed.
+"""
+
+from typing import Optional
+from pydantic import Field
+from pydantic_settings import BaseSettings  # Changed import
+
+
+class Settings(BaseSettings):
+    """Application settings with environment variable support."""
+
+    # Basic app settings
+    app_name: str = Field(default="ML Inference Service", description="Application name")
+    app_version: str = Field(default="0.1.0", description="Application version")
+    debug: bool = Field(default=False, description="Debug mode")
+
+    # Server settings
+    host: str = Field(default="0.0.0.0", description="Server host")
+    port: int = Field(default=8000, description="Server port")
+
+    class Config:
+        """Load from .env file if it exists."""
+        env_file = ".env"
+
+
+# Global settings instance
+settings = Settings()
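
A small sketch of how these settings resolve from the environment: pydantic-settings matches field names case-insensitively, and the same keys can come from the `.env` file referenced in `Config.env_file`. The values below are illustrative only, not defaults used anywhere in this MR.

```python
import os

# Illustrative overrides; any Settings field can be set this way or via .env.
os.environ["APP_NAME"] = "ML Inference Service (staging)"
os.environ["DEBUG"] = "true"   # parsed into the bool field `debug`
os.environ["PORT"] = "9000"    # parsed into the int field `port`

from app.core.config import Settings

settings = Settings()
print(settings.app_name, settings.debug, settings.port)
# ML Inference Service (staging) True 9000
```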
app/core/dependencies.py
ADDED
@@ -0,0 +1,19 @@
+"""
+Dependency injection for FastAPI.
+"""
+from typing import Optional
+from app.services.inference import ResNetInferenceService
+
+# Global service instance
+_resnet_service: Optional[ResNetInferenceService] = None
+
+
+def get_resnet_service() -> Optional[ResNetInferenceService]:
+    """Get the ResNet service instance."""
+    return _resnet_service
+
+
+def set_resnet_service(service: ResNetInferenceService) -> None:
+    """Set the global ResNet service instance."""
+    global _resnet_service
+    _resnet_service = service
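
Because the route resolves the service through `get_resnet_service`, tests can swap in a stub with FastAPI's `dependency_overrides` instead of loading real weights. A sketch is below; `FakeResNetService` and its canned values are assumptions for illustration, and note that the lifespan (which loads the real model) only runs when `TestClient` is used as a context manager.

```python
import base64
import io

from fastapi.testclient import TestClient
from PIL import Image

from app.core.app import create_app
from app.core.dependencies import get_resnet_service


class FakeResNetService:
    """Stand-in for ResNetInferenceService returning a canned prediction."""

    def predict(self, image):
        return {
            "prediction": "tabby cat",
            "confidence": 0.99,
            "model": "fake/resnet-18",
            "predicted_label": 281,
        }


app = create_app()
app.dependency_overrides[get_resnet_service] = lambda: FakeResNetService()
client = TestClient(app)  # not used as a context manager, so the model-loading lifespan is skipped

# Build a tiny in-memory PNG so the controller's PIL validation passes.
buf = io.BytesIO()
Image.new("RGB", (8, 8)).save(buf, format="PNG")
payload = {"image": {"mediaType": "image/png", "data": base64.b64encode(buf.getvalue()).decode()}}

resp = client.post("/predict/resnet", json=payload)
assert resp.status_code == 200
assert resp.json()["prediction"] == "tabby cat"
```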
app/core/lifespan.py
ADDED
@@ -0,0 +1,43 @@
+"""
+Application lifespan management.
+"""
+import warnings
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator
+
+from fastapi import FastAPI
+
+from app.core.logging import logger
+from app.core.dependencies import set_resnet_service
+from app.services.inference import ResNetInferenceService
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
+    """Application lifespan manager."""
+
+    # Startup
+    logger.info("Starting ML Inference Service...")
+
+    try:
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=FutureWarning)
+
+            # Initialize and load ResNet service
+            resnet_service = ResNetInferenceService(
+                model_name="microsoft/resnet-18",
+                use_local_model=True
+            )
+            resnet_service.load_model()
+            set_resnet_service(resnet_service)
+
+        logger.info("Startup completed successfully")
+
+    except Exception as e:
+        logger.error(f"Startup failed: {e}")
+        raise
+
+    yield  # App runs here
+
+    # Shutdown
+    logger.info("Shutting down...")
app/core/logging.py
ADDED
@@ -0,0 +1,49 @@
+"""
+Logging configuration for the application.
+"""
+
+import logging
+import sys
+from typing import Optional
+
+from app.core.config import settings
+
+
+class LoggerSetup:
+    """Logger setup utility class."""
+
+    @staticmethod
+    def setup_logging(
+        logger_name: Optional[str] = None,
+        level: Optional[str] = None,
+        format_string: Optional[str] = None
+    ) -> logging.Logger:
+        """Set up and configure a logger."""
+        logger = logging.getLogger(logger_name or settings.app_name)
+
+        # Avoid duplicate handlers
+        if logger.handlers:
+            return logger
+
+        # Set level
+        log_level = getattr(logging, (level or "INFO").upper())
+        logger.setLevel(log_level)
+
+        # Create console handler
+        handler = logging.StreamHandler(sys.stdout)
+        handler.setLevel(log_level)
+
+        # Create formatter
+        formatter = logging.Formatter(
+            format_string or "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+        )
+        handler.setFormatter(formatter)
+
+        # Add handler to logger
+        logger.addHandler(handler)
+
+        return logger
+
+
+# Create application logger
+logger = LoggerSetup.setup_logging()
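
The rest of the package reuses the module-level `logger`, but `setup_logging` can also build scoped loggers. A short sketch (the logger name is illustrative):

```python
from app.core.logging import LoggerSetup

# A more verbose logger for a specific code path; calling setup_logging again
# with the same name returns the already-configured instance.
debug_logger = LoggerSetup.setup_logging(logger_name="ml-inference.debug", level="DEBUG")
debug_logger.debug("handler and formatter configured once per logger name")
```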
app/services/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Business logic services."""
app/services/inference.py
ADDED
@@ -0,0 +1,152 @@
+"""
+Inference service for machine learning models.
+
+This service handles the business logic for ML inference,
+following the Single Responsibility Principle.
+"""
+import os
+from typing import Dict, Any
+import torch
+from PIL import Image
+from transformers import AutoImageProcessor, ResNetForImageClassification
+
+from app.core.logging import logger
+
+
+class ResNetInferenceService:
+    """
+    ResNet inference service.
+
+    Handles loading and inference for ResNet models.
+    Follows the Singleton pattern - loads model once.
+    """
+
+    def __init__(self, model_name: str = "microsoft/resnet-18", use_local_model: bool = True):
+        """
+        Initialize the ResNet service.
+
+        Args:
+            model_name: HuggingFace model identifier
+        """
+        self.model_name = model_name
+        self.use_local_model = use_local_model
+        self.model = None
+        self.processor = None
+        self._is_loaded = False
+
+        if use_local_model:
+            self.model_path = os.path.join("models", model_name.split("/")[-1])
+            logger.info(f"Initializing ResNet service with local model: {self.model_path}")
+        else:
+            self.model_path = model_name
+            logger.info(f"Initializing ResNet service with remote model: {model_name}")
+
+    def load_model(self) -> None:
+        """
+        Load the ResNet model and processor.
+
+        This method loads the model once and reuses it for all requests.
+        """
+        if self._is_loaded:
+            logger.debug("Model already loaded, skipping...")
+            return
+
+        try:
+            if self.use_local_model:
+                if not os.path.exists(self.model_path):
+                    raise FileNotFoundError(f"Local model directory not found: {self.model_path}")
+
+                config_path = os.path.join(self.model_path, "config.json")
+                if not os.path.exists(config_path):
+                    raise FileNotFoundError(f"Model config not found: {config_path}")
+
+                logger.info(f"Loading ResNet model from local directory: {self.model_path}")
+            else:
+                logger.info(f"Loading ResNet model from HuggingFace Hub: {self.model_name}")
+
+            # Suppress warnings during model loading
+            import warnings
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore", category=FutureWarning)
+                warnings.filterwarnings("ignore", message="Could not find image processor class")
+
+                # Load processor and model from local directory or remote
+                self.processor = AutoImageProcessor.from_pretrained(
+                    self.model_path,
+                    local_files_only=self.use_local_model
+                )
+                self.model = ResNetForImageClassification.from_pretrained(
+                    self.model_path,
+                    local_files_only=self.use_local_model
+                )
+
+
+            self._is_loaded = True
+            logger.info("ResNet model loaded successfully")
+            logger.info(f"Model architecture: {self.model.config.architectures}")
+            logger.info(f"Model has {len(self.model.config.id2label)} classes")
+
+        except Exception as e:
+            logger.error(f"Failed to load ResNet model: {e}")
+            if self.use_local_model:
+                logger.error("Hint: Make sure the model was downloaded correctly with scripts/model_download.bash")
+            raise
+
+
+    def predict(self, image: Image.Image) -> Dict[str, Any]:
+        """
+        Perform inference on an image.
+
+        Args:
+            image: PIL Image to classify
+
+        Returns:
+            Dictionary containing prediction results
+
+        Raises:
+            RuntimeError: If model is not loaded
+            ValueError: If image processing fails
+        """
+        if not self._is_loaded:
+            logger.info("Model not loaded, loading now...")
+            self.load_model()

+        try:
+            logger.debug("Starting ResNet inference")
+
+            if image.mode != 'RGB':
+                logger.debug(f"Converting image from {image.mode} to RGB")
+                image = image.convert('RGB')
+
+            inputs = self.processor(image, return_tensors="pt")
+
+            # Perform inference
+            with torch.no_grad():
+                logits = self.model(**inputs).logits
+
+            # Get prediction
+            predicted_label = logits.argmax(-1).item()
+            predicted_class = self.model.config.id2label[predicted_label]
+
+            # Calculate confidence score
+            probabilities = torch.nn.functional.softmax(logits, dim=-1)
+            confidence = probabilities[0][predicted_label].item()
+
+            result = {
+                "prediction": predicted_class,
+                "confidence": round(confidence, 4),
+                "model": self.model_name,
+                "predicted_label": predicted_label
+            }
+
+            logger.debug(f"Inference completed: {predicted_class} (confidence: {confidence:.4f})")
+            return result
+
+        except Exception as e:
+            logger.error(f"Inference failed: {e}")
+            raise ValueError(f"Failed to process image: {str(e)}")
+
+    @property
+    def is_loaded(self) -> bool:
+        """Check if model is loaded."""
+        return self._is_loaded
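
The service can also be exercised outside FastAPI. A sketch, assuming the weights were already fetched into models/resnet-18 by scripts/model_download.bash and that `example.jpg` is a hypothetical local file:

```python
from PIL import Image

from app.services.inference import ResNetInferenceService

# use_local_model=True resolves to models/resnet-18, mirroring the lifespan setup.
service = ResNetInferenceService(model_name="microsoft/resnet-18", use_local_model=True)
service.load_model()  # loads processor + weights once; repeat calls are no-ops

result = service.predict(Image.open("example.jpg"))
print(result)  # e.g. {"prediction": "...", "confidence": 0.93, "model": "microsoft/resnet-18", "predicted_label": ...}
```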
main.py
ADDED
@@ -0,0 +1,6 @@
+"""
+Main FastAPI application entry point.
+"""
+from app.core.app import create_app
+
+app = create_app()
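
The MR does not include a run command; one way to serve the factory-built app locally while reusing the host/port settings is sketched below (roughly equivalent to `uvicorn main:app`). The helper file name is hypothetical.

```python
# run_dev.py (hypothetical helper, not part of this MR)
import uvicorn

from app.core.config import settings

if __name__ == "__main__":
    uvicorn.run("main:app", host=settings.host, port=settings.port, reload=settings.debug)
```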
requirements.in
ADDED
@@ -0,0 +1,16 @@
+# Web framework
+fastapi==0.104.1
+uvicorn[standard]==0.24.0
+
+# Configuration management
+pydantic==2.5.0
+pydantic-settings==2.0.3
+python-dotenv==0.21.0
+
+# File upload handling
+python-multipart==0.0.6
+
+# ML/AI dependencies (newly added)
+transformers>=4.35.0
+torch>=2.4.0  # Newer PyTorch with NumPy 2.x support
+pillow>=10.0.0
requirements.txt
ADDED
@@ -0,0 +1,161 @@
+# This file was autogenerated by uv via the following command:
+#    uv pip compile requirements.in -o requirements.txt
+annotated-types==0.7.0
+    # via pydantic
+anyio==3.7.1
+    # via
+    #   fastapi
+    #   starlette
+    #   watchfiles
+certifi==2025.8.3
+    # via requests
+charset-normalizer==3.4.3
+    # via requests
+click==8.2.1
+    # via uvicorn
+fastapi==0.104.1
+    # via -r requirements.in
+filelock==3.19.1
+    # via
+    #   huggingface-hub
+    #   torch
+    #   transformers
+fsspec==2025.7.0
+    # via
+    #   huggingface-hub
+    #   torch
+h11==0.16.0
+    # via uvicorn
+hf-xet==1.1.8
+    # via huggingface-hub
+httptools==0.6.4
+    # via uvicorn
+huggingface-hub==0.34.4
+    # via
+    #   tokenizers
+    #   transformers
+idna==3.10
+    # via
+    #   anyio
+    #   requests
+jinja2==3.1.6
+    # via torch
+markupsafe==3.0.2
+    # via jinja2
+mpmath==1.3.0
+    # via sympy
+networkx==3.5
+    # via torch
+numpy==2.3.2
+    # via transformers
+nvidia-cublas-cu12==12.8.4.1
+    # via
+    #   nvidia-cudnn-cu12
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-cuda-cupti-cu12==12.8.90
+    # via torch
+nvidia-cuda-nvrtc-cu12==12.8.93
+    # via torch
+nvidia-cuda-runtime-cu12==12.8.90
+    # via torch
+nvidia-cudnn-cu12==9.10.2.21
+    # via torch
+nvidia-cufft-cu12==11.3.3.83
+    # via torch
+nvidia-cufile-cu12==1.13.1.3
+    # via torch
+nvidia-curand-cu12==10.3.9.90
+    # via torch
+nvidia-cusolver-cu12==11.7.3.90
+    # via torch
+nvidia-cusparse-cu12==12.5.8.93
+    # via
+    #   nvidia-cusolver-cu12
+    #   torch
+nvidia-cusparselt-cu12==0.7.1
+    # via torch
+nvidia-nccl-cu12==2.27.3
+    # via torch
+nvidia-nvjitlink-cu12==12.8.93
+    # via
+    #   nvidia-cufft-cu12
+    #   nvidia-cusolver-cu12
+    #   nvidia-cusparse-cu12
+    #   torch
+nvidia-nvtx-cu12==12.8.90
+    # via torch
+packaging==25.0
+    # via
+    #   huggingface-hub
+    #   transformers
+pillow==10.1.0
+    # via -r requirements.in
+pydantic==2.5.0
+    # via
+    #   -r requirements.in
+    #   fastapi
+    #   pydantic-settings
+pydantic-core==2.14.1
+    # via pydantic
+pydantic-settings==2.0.3
+    # via -r requirements.in
+python-dotenv==0.21.0
+    # via
+    #   -r requirements.in
+    #   pydantic-settings
+    #   uvicorn
+python-multipart==0.0.6
+    # via -r requirements.in
+pyyaml==6.0.2
+    # via
+    #   huggingface-hub
+    #   transformers
+    #   uvicorn
+regex==2025.7.34
+    # via transformers
+requests==2.32.5
+    # via
+    #   huggingface-hub
+    #   transformers
+safetensors==0.6.2
+    # via transformers
+setuptools==80.9.0
+    # via
+    #   torch
+    #   triton
+sniffio==1.3.1
+    # via anyio
+starlette==0.27.0
+    # via fastapi
+sympy==1.14.0
+    # via torch
+tokenizers==0.15.2
+    # via transformers
+torch==2.8.0
+    # via -r requirements.in
+tqdm==4.67.1
+    # via
+    #   huggingface-hub
+    #   transformers
+transformers==4.35.2
+    # via -r requirements.in
+triton==3.4.0
+    # via torch
+typing-extensions==4.15.0
+    # via
+    #   fastapi
+    #   huggingface-hub
+    #   pydantic
+    #   pydantic-core
+    #   torch
+urllib3==2.5.0
+    # via requests
+uvicorn==0.24.0
+    # via -r requirements.in
+uvloop==0.21.0
+    # via uvicorn
+watchfiles==1.1.0
+    # via uvicorn
+websockets==15.0.1
+    # via uvicorn
scripts/model_download.bash
ADDED
@@ -0,0 +1,8 @@
+python - <<'PY'
+from huggingface_hub import snapshot_download
+snapshot_download(
+    repo_id="microsoft/resnet-18",
+    local_dir="models/resnet-18",
+    local_dir_use_symlinks=False  # copies files; safer for containers
+)
+PY
test_main.http
ADDED
@@ -0,0 +1,11 @@
+# Test ResNet Prediction Endpoint
+
+POST http://127.0.0.1:8000/predict/resnet
+Content-Type: application/json
+
+{
+  "image": {
+    "mediaType": "image/jpeg",
"data": "/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxMTEhUTExIWFhUVFhoYGBgYGBgYGBgXGBUYFxUYGBcYHSggGBolGxUXITEhJSorLi4uFx8zODMtNygtLisBCgoKDg0OGxAQGy0mHyUtLS0tMjAtLS8tLS0tLy0tLS01LS8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIAKoBKQMBIgACEQEDEQH/xAAbAAACAwEBAQAAAAAAAAAAAAAEBQIDBgEAB//EAEMQAAECAwYDBQUGBAUDBQAAAAECEQADIQQFEjFBUWFxkRMigaGxMlLB0fAGFBVCkuFigqLxI1Nyk9IHFkMzsrPC4v/EABkBAAMBAQEAAAAAAAAAAAAAAAECAwQABf/EAC4RAAIBAwMEAAUEAwEBAAAAAAABAgMRIRIxQQQTIlEUYXGRsTJCofAjgdHhBf/aAAwDAQACEQMRAD8Aw5s1A0U9kNocCynNNQ0DLs6tmEC63NOiSlpYJ2HCPJlNpDGXZ2rBgsQUlwKjOJdzNjQ6D03QtkyH08oum2cJGQi8yCPDrE2ejRotg89zd7oEkqAIoOkNhaAQE4RXWkLTZ1Pk4i2y5784TQi3flawzTYhsOkXy7MNh0gizFwKNBQlxS5ltkETZhsOgi5NmGw6CCBLbWCESHaBc7SwEWQbDpHZlhSQRQcWhl2MCrtKUKGJgPFx5QssrcvStGWUmI1XEp3IcHhExcWQYeNI1UmcFB0mhyjxDcYwOjWk9z2Y9bQgtjMKu6UipFdqExXaJiQHCR4t8Ic2iypxYi/wim1XbiY67fOO+GUWtTuxo9c6ibirL2ZS3SysvhFdhACrEraPoCLAQ1AeAELrXdgUXAz+sodudNaUkCn2K0nJt/7MYLPwiwWfhGgXdvARxFgHDpGqnGTyzLWnTV1ERfd+HpEhZeHpD37kNh0ixNjGw6RrUTyZVBALJw9IvRd/DzEPkWQbDpF6LKNh0htiblcRy7u/h80xb+Hjb0h6iQNh0iX3bl0g6iTiIfuHD0jyrAPd9Ieqs/LpEV2fl0jtQNJnV2Lh6RUbHXL0jQLkDh0ildmrp0g3BpEa7INvSK1WYbekPV2Xl0ildl5dI64bCc2UbekV/dht6Q6VZqftEBZuHlHINhKqzDaBewG0aM2T6aAfu300cwJASJymppB9ktYyUIVS1NR4mCTlGNHozWTUWaUkhwxHCDJNnABDZ8PLlGPkWlSS4Lcobyr6mEhyB4CFlSTGhXqQ5uhjOlJByPygKcEDIkHQ/N4eyLRKXXEAfntHLTIkqVgWCDnl8YKbW5F2bwI0CoCWL1NKdXhpJugZ5P6xcLtTKLoDvXEch4HOI2K/CtfZplYqs4LPxY5R177AsRstjmhR71DkaMPAwwtM5EpJMw5DqWp1hhMUhIYqCSqgBLExmL+uObNI7N1M7qJoBpXrAGVjky/5KsgrPLgc+BiVqtgWlAQCSMhw/M+1YrsH2KmYhjUGIdx5jnGl+zlx9kCpQ7xpvT94LaCUXTJUZbkkYXehd826bQDOXMWzoolRAUB7QNXz+Gkay19nKQ61JQjV9eAGpppFCLxshT/6yGpQltaUPEQEwGYu+7FKmv2ndDsRlUsQDGuFkYeEQVZbNZpa7QAMOalJ7z4jn56QnsP2yQcRmIIr3QPdrmSz6c+Edl7HYG8y7Bm3ebWK50pMtJVMUBuTlySIJkX5JKEqMxCSfyk1o/TLWMFfX2hmTz3gAgZJGQrmXzPGDGOQuTasaFd+2erEnCMsiasyd4BvO14UCYO6lTcSX32yeMcu0h8oqm2t+W0NpQY6th9OvNSi9AOIL+sdl20anyMZ2Vajx6xaZyuPUw6kkh+1J8D025O/kYnJtD7dDCIKL6+cMLGo7mGuuAdvhodygTF7GF8qcRv1ghNp4HTWDcjKkHS0RahML/vJaj668YNlzOefwjrk3AsVLMeMqJ9rTKOLUdjHC2BJkqIGRWClJOxiKJKn16GGTFaKOxilciGIknj0Me+7E6HpHXOsKjIeJCzfTw6s93l2LjiU5UgwWRCM1IOlQB6mElWhDdl6fS1KmyEKbrUfy67iFv4cv3fMfOH96XskA4UgUB7qwPzcBGc/Fuf6/wBozrqZS2R6a/8AmU0vJu/9+Rip1oxF2YxZKKgHYtvEJNNQecNJgRMQEpUArV6B9IDnosrYJxoKtd6s+vYImYOESC6wFNQpJYxwTYsmmYpQknZmgu219mXfw02qIMt17qUkIQmvvByrOgB0jNSpu5g2TawmteBByjrISzDECaokl6ZkuW59YayLaZSQqWAfeWdTqG1MJrbfJmJSmjJq7VLgO/SAjaTA3Gs0M7dbitZWrM+HlBtzXsqScSTXIg1BHEPWM+pZOcSwGGxawLPc+m3X9p5cxSUKwSw3tElnbdxhru8aeStKgClSSDkQQX3bePiMte8N5N9qlpTgLKANSKir0MSlT9Dxb5Dft7eOO04KYZQwgjUlioHkaeBjM9qSWakSt9sVNWVqqo5nc5aQPKVWHUtKsOqTkzUSr3TLsi7KHV2jKUrEQyu6WAaoZLHfwqgM1uMUrcxxElR0JibqpGiHSSfBbOtyiGyEU9sTSLU2RSvymCLFZ5aS6yS2QTqeY0iU+oSWDVT6J3ysFRsC2ds46LuUdINVe89+7LASKDuOQNKkRfOtE7D3+7i91OXAloxSrVebHo06FFbJipFiINRBiLKT/eJ2CzlRNSaah/jDuXYhuOkOq9lkWVBXwKE2SuXnBdnsphqixp3T0EXJsoH5k+Xzh41pSZJ04RQDKs5iZlQTMkbK6f3ir7qr3jmNDvzjTGczLKNNlQkq4Qxs0vff4RXKs5bM5H1hnIsZfPXbhFYzMdeMUsFfYCLAkQbLsf00W/cPrDDazHouL0JEXolwaiyAbdGiJnoScknxGsTl1MYlqfSTnsQRZNx5x6b2aNxVjQnR4WW+9yMnyOS29BCW1XiVHNQqD/6hOjRDu1Kn6cI9On0NOC8x/bb1GE10907xnbdeZJoR7Xu8IEnTFHVXUwJMB458Y6nSSeTa3pXieWomhbKF/ZQahJOp84F7NW584u4+iDqW3YjTKpoYjMs6hXfjB6btWQCx5/3ERN3L3HWB3EjzFQk9mAAGIrlQWuxrBZuhEVrlqGhju4hn08wTLSJpMWqA1cHqIj2UdrO7ByJBcR7OJYYOsXsssTOIiQmmKkiJEQXMMaBaFcYJRZphySS8UyJpTklJ5gxbJmzS+ENi2f5tEJ1JcG6l09Pm7+gQq5ptGzOlAY6izS5ZZQmLV7oSBVst4gi7p6iF1cUCipj4EmGdiuxQqucl3qy3roMqHkYyTrNLyl9jZToxviFimRZEGvZTArRD18SoU84cSrKEh3QkalQdjqyiO8fDSIi0KlDNCf8AUsk8SzH1iKbxQ7hBJOaiXHIA6dOUY5dyptsa04wLxbEthBKyNQwB8AwIiuyS5jv93wbLIDB9e9r4GDEXkGGGnBLJPj3fjEzeS1ZqNN8Pq0TjTnwvuLKoTk
2KYoP95UAMwC/9TR1UgJDqWFPus1/lLwDNnKUXxEeMUT0Pq5EOumb3YnczgITbUk90N/K3wEW947QHd8glRzy2fWH8mzcPKNcIQhsQqyk3lgaLOdvOCJdm4ecGosx28oJlWQ/Qi6qsyShFAMuxjbzgmXZRtqNYPlWM/SYIFk+sMPrZnlJcC+XZBtvrxg+VZztrvF8uQw66cYqtiyAQHdtOcF1VFXIqDquxYCE5wPaLySMz/SreFk4zN1/1QHaJUz+PL+LeM0qjmz0aXSQgF2m9E+9v+Uwtn20nUdIiqyrOi/0mJpsCvdV+kw0IpF5aUtwNcpKs/jFYsiB9GGX4Ws+8P5TF8q4Vn85/QfnGmM8Gacobt3FZkp+nipVkB0840ku5CKu/8n7wSi6hsP0QVNIk6vzMim7VaDzEL/uC9vMR9LTZEj8g/SPlCb7un3R0EHvEXM+c2FUxGEpOTEO8Ml30A5nSpZIbJIdlKbP6yhR9mb8lJkNObuszt0AMJ/tPeAVaFdmRgDANkW1Fd3rGWOqU3Frbk1VZ0lTUl9jdzZkgueyALcT5GFc/CGZmJAAIZySwoDErivaTNnKRMKQOzSoKUcIBYlVSc6geEZ/7Q3oDaFJlkYEqABFRTDUF9wYWDk5abBmqUY6kzS9lKI70kPuFK9IpNyy1eyseX7kQTJnSVzxKdNZSVgghsRfECXzZojbkIRapVncPMBL7e5XckGnKFVbNvlcq6cd7823FFoukgsK/XKKVXadx6RprJYkrmzJSZgJlpSS71xO7DgwrxiJsQ7fsKYuzx5aO0N8SlgXsX5RlzYiDxi1CFJqAAd2BPnGpNzHSvIExSu7iPytzT84D6mEi8Ona5M0EKNMzwSPhBNnBBZSlJ4YRDU2Ze5+uERFmO3n+0c5pjxjpe7KJKUEe1lulPkKxcpKSw7x8UgfpaLUWfkD4GL5Vn5xJ6dyuqTB+yQP/ABjm7egiXZqOSQkDb94Yy7EOJ8IPs9l2ST1+BiMq0Yhs+RUmQTqfB/nBybtQ3t12NPM5Q0lyfeSG4GvDNUGy5LCiW8B/yNYzS6l8E5zSE9kupGalYtggYusFGyyxVsPMEK/pL9YbGS4qtXgUt5GKDYEh3HUv6RKVST3ZFVE3uLZIQSQkF9z8yTDGVLLRGXISMkjwEFo5RtoW04Fqy9FkmUX8YNlSoGlGLu1LUeNaZhmmwoIimdaEj83kflAmFfvq6n5xEWRRzV1eG1ehVTXLJqtg38jFPbklw0Eou/iOkFpsg2HSBbljOcVhC/t1cI92i9h9eMMvu42HQQDfN4S7NLC1MXmIQ1PzEP0SSekMvkS1E5Sl8IvQkwPeF5ypIlqUQ01YQDTUEvypCu134E3nJs7gJMhb/wCpRlqS+mST9GjJNitmiCYHm25KVhBLEh/MJHFyTAH2kv1EiWFBSSoqSAMQ94O7HJqPlWpEYL/qNfahaWlKDJCA4OIEM5Aagqa8oaMXIXk+qMYXXjeqZK5SVFhMKn4BKCX6gDxA1hJcX2pSbJLUopxBABGMAummR5fWUfO/t3fhnT3SaIDJdlMXeh2y8RBhTbYXg+x2S29oVhLdxQDuC7oSsHh7XlCx1RlfsF9qUgTjOVVRCwVKAySElLqNS4cAZAx7/veV7p/XHODTsdufHQmOhMNU3cWyiYu07RbWiSpsUlEdww3F2K2iYupW0LrQ/bYpSohi8XzLZMMxMzEcSQADSjUEMfwlW0d/CFbQrlG9x1GaRVdd8zZE1U1Ku8oMpw4IO7xcm95vaqnEurBhJ4FOH0iQudW0XJuhexictDyVjrWBlL+2K2kBSB/hPiId19wpS9eJeHU37YyGThlKqAVB6ineAJO7eEZhNzLP5TBAuJZ0jPKjSfBeFWquTbWW2WWYpKUzgSpRSBlUAHUcQHg+TdiFpC0EKSqoLpbyMYBNxL2MHWe6JoyKhpQkZVHmYzS6dftkaFXmbH8HGxjv4YBv0hbd8+1oThCiRhYYg7cQTWHVkvif/wCSUk8g2/PhEJUqvsPfnwVy7E2QEEy7Fw9YZ2S3IXmhSNnDv0hkiWDlEZQnyRn1UluhRIsja+vxg6VZ+MHJk8IsTL4QY0W9zJOvcDMvjFE2zgw4RLjk2VSkafg5OGtEo17MQfdhFiZEHrkxDA0dTi0rGjvXBhKixMqIzrUlOhPKF1pvZY9mWdc66UyjTGEmFKUhuhERtM9MtClrUAlIck6CMpeFstMwNiKB/CCH3rnCKdd0wu5WXDH2qh3Y71DxeNNndp8s2ls+1NklKwqnAl2LAlqO5YZct4Ro/wCoEvtS6T2dBSp7ruQMquOkZw3GrY/pMQmXGpsj+gxZU0DtpBdv/wCoK+3lTUSwyJakqSciVEEts2FPQxl74+0E+0Yu0UGVM7RgG72EIDcGEMplyK2P6DA825lNkf8AbMVUUhHH5C+9L8nTky0TJjpll0hgGO7gOTziidfE1U9M8realmUQNEhIozZAdILXc6joofyGKlXSt/ZV+gxRRQjT9A943zNm4cZBYN7IELpk/l0hv+EK2V+gxFdyK91X+2YooxJtTFcq2KbDSvCK5q4ci5le6r/bMVrudb5K/QYZJAakJRNPCKsf00O1XKvZX+2YD/DFbK/QYNkTambtCpDJ7qMhmeHOLEqle4nqP+UZWUSwPZpelRXLh9ZRei1N/wCM/pEeG1LhnsJx5RqCuSPyJ+vGJCdJ9xPUfOMwbfoEp/mGrR4Ty1EpfavzELpl7DePCNaJ0r/LH14x42mV/lj9JMZNE5fuS+RB+cUrt6sTdjLU2oA+Md25eznKK4NmLxkDNKRzYepif4vZx+VHKh9IxybSvMJlo/lST0Ecl3wx709XhLQPWC6UhdcTdS71le6j64GLk3vK2SeQf0EYtN4YgCkzCd+78IIVaJgFZiv6Q3nEnB+/5HsnsjYpvhGYQP0EeqYtTfKRmEeX/GMEu+QAxExR5GsTk3yMlBXiTlAdOYPA3qb+Gw8A/oIuRfnL+gesfPZN8SnoS2tEv5xem2k1lomKGrhKa0Pz6wHCa5OtTexv/wDuAjUf0x0faBW48SkRh02yaR7BT+k+hETlXo2eNxsk/ExKWtc/yN2qfo3Qv4+95hvKJi+1ak/XKPn0y8VBWJMuYRqWB8qfQgk24kPgmDoPKJt1fZ3Yp+jefjR1V6wIq/Ve8esY+VbD+VJB4gPEROXWp6o+CTGno9cpNSY0enprg1y75V73nFS73Vv6RkV2guAZiknbAFcq0aIptuYxTCRniCUjoTHo9tDaaadrfg1/4wrL5RA3qdvL9ox8y8xlWmZdB8gaxGVeKQCSZhzoEHTZtIdUzv8AEvRr/wAV3w+Xyjn4oNh5RlpFtJGLs5gG6gB5Kyga0W8g1mtwYK8wqCoXwc+2lf8Av8mu/FkuzDoflEF3mk0+BjLpthNHWTuHSPi0cNqdTEKp9VJEFROtD0jSKtaYqXORxhCbakaN/NHUWknceMGzKRVO+y+w77RHHzjgXLfXzhP95zd6cYslz3rUeMdlD6aT4GRmSuPnEwqUd/OFSpvPrHEzq
5keMdZh0U/SGyjK2PnER2Wx84RqtR3V+qJCeWd1fqMdZ+zlGk+Byex4+cLOzlbHziEy0NqfBULfvX+r9UMk/Ys6dL0ZIWxQyrTeLBal69IhKkJcd6jfD5xebRhYhIfjyMT5wjw0na7ZMXiRQp+ucdC5inKEkDkdn+ucULt4OaU8+r7xZZrSTQZH6Hp5wko2zYeMtWNR4KmULqqPrXlBGJRGY55Goi6RIQc1e0aas523j0+6kl8KwSOeVM+HzhZVE3kpGjJLx/ILKQoHEC/jBcu0rz7hBOvjxgKfd6gHxUA9Sz+kUJJQWJB8Y5pSyBNxxZoZfeJhOaQ21ItwrA9tLcVE604QnmzgajOIDGugVTjlw8I7tpgda2Ms0EuclJZS3JDgvwGjHcxNFsSNH4kPV4SSrsmPmH5wZOu9ftEuNSCOGY5vCypxezKQqSX7bDgTyWKVpHBgPNmip11xTta5noBxhQuZLQQS6nBOeVWgj8aQAwQw2/fwiPbfCLdyPLt/sZWeyqaigdWdiYsCVADvl6ZVH75QpTfcshsJH8Tl/XhHpiisYkKpsebFzzIhHCXI6lHdMZTLTPTkkq8QGiUtU13Vm+poOJ4QhwzCogE0+vjFk2yziD3nEF0rehVVzyP13tLTmoE/wgepzio3lJPsqUkqo4YtzDRnvw3+J/D1ia7qAFZgJ2HzMaOkpU4yeRZ1qlsRG6J05NUWhKgdHIpuQdeFYOlqnBGIS0zNyApRrpUcNIVWW85VnHcl9/UqL6vTYwDO+1NoJJxnhwrG3S5vCF70Ka8pP82+5ok2tnM1SZb/AMAfhrtwgebbpJHcnYXZ2SEqPiPCEh+0S1pwzQFjiPj9ZCOJNmWaOgmjZDqfrKD2tOWL8UpYg0/rdP8A4OVSEKS6lTa/xAvxr9VidkkywD2ZNNVAO+wJ56CFGBKa41KDigOgj1qvJRDSkYQKd0acTvxhcvCYdcF5SWfux5PmMaHF/N8B9VgY29CQ6mfQMT8TCGVZZkwuVgEnJ6wzlXM5AKiQMz40A2o/SGUYrdgdarP9MfuEm1ylVDOdMuUeUvQM+0DruZOeJhpn8vSLkXaEd5CqjfOtI7VBcjqNV7xS+gR2E1nUgAAO7jIByWfaB1WutMJ8DHBeExL4ypSdRoUjMeIpFtmvCQst2AHgnSC9Vr2v9Du5G9k7P5/+BUjCpJ38YrmSpnsoQC31qeMBT7UUTJaUkgLOlAK69YYJtZSyiol9jX6pAeqI0ZwnvhrewFNVRlsFn2QHqPPjFaitKQWDGg+gYYXdYwpJmKUlWEsxqchk/OOTpImKKQQkJL8NqCG1q9kBwla/2BkzWFWbkYG+8J3HQxJSq5FtdoGxJ2HlDYJOvISWezlTMdhrR96cDF05JoFEUptlE7HbUppgHluD84vtlmxgYSBmaDyprnGVzle1sGVUo6bxd2BBKRo+tdo8be1EjyiKLGpiS4wjIg1+UVT7IUkBi5Zs6uzN1jrJiuU4rCsdm21SjyyrxixFrmJNaOIjJksRiHWj8oItWApISAKv5Gg+vhDtRatYVOd9Vy+TOWujUyD70o+WsGSruCmVMoQ3doHFCC7sz7NCpFrCEsKceORMQn3ip3JzGnn5wqptbFnVj+7JoBckpYSy2wkBVQp2YEUGdM4ptsrsckhSFZHNw+41YfTwkk3gt6Nrm2uecGKvHCkJUHoR7RwnUHiQ/nCSgykasLXSsyybbCr2UgNs+2bxFCphDVKX5OeeekUotYNEpDk15Elhs0XLmzBUghyQ+jUSacH84S1sI7VfLZZ+HKUKjL5hxuzGIS7rJFVMHbwicq1E0xF2y3ap8aeW9IhNvA5ca+EC8w6aW4bZ7slpHeLnUCtHz+EFpWEuE5El/HhzIhYm1qUXBAo/pX+3GCpE6gJfXhlmD4t55xKSk9y8HFYiW/iEsA/4ZDsDtw8jFsudiUCkNTfcf2iaES3cgU5bvkenXjFU+1Swe6APD639PCeHhJlVfdtWCLQZYAxHpxETVd0hdQshxqxY6NTjGVtVqCifKrx6VeC05aZMNeY5Rs6Sg75Znq9VBYawN7bcqkg4ZgUSQANcnJPllvCedY1DyLa1D5Qys98Es4Ph8R4mLkIQsE4iC54B3GVeJjQ5zg8meVKnVzTM+hwQAHPJz0hnc9nmziVAJCQQTiAD1zDhj46xcmRKSpy9D4M9c86U5R6dfobClDCgYACgNBxoWhu7qdooFKgoZqysvS5GiblkIUP8U0oRpic1yJoNOAiM+xWcuoKIO2LfLTb165mfbFhzoTvWh8opN4E6xVU78jvqqEXp0DW22lcs0A4ED64QEu2KDkEseNOXnHFXiFe3VhFlkCVqSkJcbZk5A8svCOUVHdGepUc5eEvodkWxY1LaudOUMLJayaACKvuiW9jMBjrmwPB9orl2RUouVu22r0zeFThIvDvQtyvwjTS7vnLQQlDhSWPeSKKGjmOTfs5JlygqapaV4mUAQQASWySdG11hR/3UpAwjHtRW1IHmfaRUwkKMwjOqqUECNOpf0is+o6d/N/MutFmmoUBNThSrKoJKd6E8IslTbOgsFnGMwQqgzzwttHDOXNBJKlEBkuSojgIW4CJhBQokCtC+mcPvuRk3Td4rD5ZdZDLShS0+0D3XBbxpxiSr3Qw7wxfmGFVICNkmZ99AGaWUMXFvrKHNguATEgqKUOHdSc/Es8GUoJXYlONaUrQR6xze1BlZlRyFMmVmaD2Yj+Bq9w/qT84Z3YuTJFUoUtyQruggEAMCa79Y7+Mo2H60xmdSSfisHofDwkk5tXMMZRSyq5Aqpk5I+n1eG9jVTF7oxB+B2gWRNbulRKSzk1O9a11iAnglTUchqlhvU6ROaMFJ6M+wiTOUrEFOS702Bb5w/tEhBUkqBKqTKHYJc58ATzfWF1lnABSZgTLIYggOXBdidDTxjiLwQoDchjt7Ayfd68hEZXbujVCyXk7iu87PMM3DTvKITsyS2goKQBOCgSkioLEEEeuUav76hRlBVGck8SAH5sBrp4Qjt6DjChValFRSBkfaSATU00rpWtLUqnBkr0UvJPkWoRiqSenX4dYNRZsSHb2AScy44cHIHMvwjsyUQvHgo7qGgr3gT4n9od2BEsDCafkUl83Se9UuAVFJ4FNKUg1KllcWlSu7CuVYFLQlgAaCgFQaBXGjvqOzVR6RdZrmUtQQ7AYiXYNhzetTkW4HaGlltQT7RGVDscTKqfHziSbWgFIo63LijBTBuqTlnEe5LNkalQji7BLHYhhwLSwYEkb0BPIpy3Lcoum5AEvVzpnQmn+kRG8pxKxhd1JGvCtcsh4Qmnz1FVSK6U+HOFScsjNxhgYlA2qfKrv5esD2yUCXAOp6s7vxc8o7ZrUAxI8csjWvjHJtrBDDQN4vsKcPCOV0xZOLRTMThLYvDXx216cY9KtZq78urvxf4xZZ5Yd3ajeH03SLLRJDLpn5OXo2XtK6jaHvF4FSksoGFrKqO1R1Km+MCTJpdnrBapTN3agnU+Bo20CTZRBA+q/Q6Q6UeCMnPkqJjqpxjix4/WvS
PJlkkAaxoo7mare2Cci2KSXBZtdoMTeaiGJfy0aIi7y4QWL1JTWgr3S3u8IlarsSlScKlFL1cVAfccI0OzWToqrDY7KtAOYcnKpJenX94MRdoUArIsOAHmXf45RXZ7JhwlOpJdy9DRxpl/UXjgvc6ihJ3bfXPPzjJJO/gaYuKX+UBn2JYJdyBmQ5GcVCzFRPdbNtuXR4dIvVLEKYuK8/r1ipFqST3aZM5bgz+MOqk/QjoU2/1AlnulZFEkmn9SgB5ehhhd13TETC7AHCHerLq4fNtYKss8JV3gcScLDINuSztUnMZcostVuSSWZ3/YGgzbLgIDrTl42Lw6SnHyvsUXhZhgxOcRS+FxzAIGbJCiSNtc4RzZ60ird4AimjlvSDbVaaqc1Hu7voYoBSUlzWgFBo3TKKU04rJOtPVK0GL0uS+p+jDSw3aVEFSS3Q8Dyz6RRZZIWoUYFQDvUOWJ40rpGtu6XLSgSu0ol++SHLknydvCDVqaVgPSdL3H5bC24LFaSsGVgYLq5rwjXSrLJCiZ4Pbkd/CVYdGZqeyEwDfloTKCClQ9kksWybMiMlb78UpRIL5fnJ0EQSnVyjdKVLpvFu5qL6mCWwmUUoOnWj6tFUgTbQgIl4TgD1pTKI3TasSSpYxYTmouwYanIRGdakhalJUzn8pb00gW4NFv33w/uZ+8rCsTRQNgcsa0xQD2Y2PnGvvK9pakKUMGP2QAatu4HE04Rlfvi/8xX14xpgnJGCtCNOWHe/99i3EpgSaEZcK6RYhWEeNcsiND4QPNNTz+EMrMHUp69xH/sTEbXdjCgNdoJNSTRn2bLPlEEWgt0PL6YdIEUYOQkfdSpqicA+rYCWfZ4GlIXW2FWN1lKUkBRoHoK5V0OUXSAQtLkgjvAlmYZau+Ib5VgWznuA6gljqO8pq+A6RdOLyUE54lB9WxKLPs5MTaNMHdZDZs0hSiwepoRV1Emnj8I9aZmqQA4pq5FD4/tCtJ/xE/6k+ZDxK8Syqbn1ELoyP3LJs4u0u7u70+PwiCrSWSH9kU2qf3MAg/XhE5Jr4fAxTSjN3Gx7Z5y1oJBHcSqhNcIGJTJ1oGhbMUSSczn8fSIqWQlwSCCGOooTBt9qKbQSCQTLlmlKmUhz4uYRKzKSldZKl1YJGRr575e15cYrlDekUoUWVXb0Mdeo8PhDWFvyGpUd6joRrF8mYQCWOQ6d4eqRlxgeyLLgOWKFU0owFIqCi45/J4RxuUU7ZCp88sxDEE8GL1p4+cDTpgOv1lEbQs4U1NcPkFgeQA8BAhMGMRJ1Gy4qg6zz8gQzBvJn51MKgcuUEyVFs9B6xWCyLGWR3MWXJI2y8NB9ZwvmW04lMaORtQu0CS5hfM5bxeMpnAn/AOQRWK9jVZ6l44PC3KZnozecVTl4hRgBwH99YGRpyiSDn4xRoya28MkpBD50oXzHAvrQ+ceQ43Zq19Yutij2iq5rrxq9YLuI95R1CFkHYhmI2NTE5Ssrjwp6pKIQZ60DEpsS0guCxwmo7p56CrGphbNtRLkqJJ0isrJCiST3t/4jA69INNWY1WtKSVngNVKfvPSjDicmD0q9Yts1kVRbOAVUJbIZirn+0L5Ki+e0aa7pYJlAgEFC3BDg11GsNUk4op01ONV5+X5RxExGMKwBqMAGy4DlrA9pvIAkhwOQp4QAVFl10/8AsRCu0KL5xOMFctU6uUY+KsO7bfRmYQtSlJAZmTltSBOyC1dxgCHY5gBhVuMKwo7w5sZaUDr/APqKtaY4M8JutPzGFinTUy1nH3H7wDVoOEBTr2Dkd7oIIvlRcV0+MKJWU46hNDqKjKFgk1qZpqzkpKnF7f8AGyxdpcUNYH+8GBUGIxbY82VSUnk//9k="
+  }
+}