"""
SAM 3D Body MCP Server
Image → 3D Human Mesh (GLB)
"""
import os
import sys
import subprocess
import tempfile
import uuid
from pathlib import Path
import gradio as gr
import numpy as np
import spaces
from huggingface_hub import snapshot_download, login
# Login with HF_TOKEN if available
if os.environ.get("HF_TOKEN"):
    login(token=os.environ.get("HF_TOKEN"))
from PIL import Image
# Clone sam-3d-body repo if not exists
SAM3D_PATH = Path("/home/user/app/sam-3d-body")
if not SAM3D_PATH.exists():
    print("Cloning sam-3d-body repository...")
    subprocess.run([
        "git", "clone",
        "https://github.com/facebookresearch/sam-3d-body.git",
        str(SAM3D_PATH)
    ], check=True)
# Add to path
sys.path.insert(0, str(SAM3D_PATH))
# Global model
MODEL = None
FACES = None
def load_model():
    """Load SAM 3D Body model"""
    global MODEL, FACES
    if MODEL is not None:
        return MODEL, FACES
    import torch
    print("Loading SAM 3D Body model...")
    # Download checkpoint
    checkpoint_dir = snapshot_download(
        repo_id="facebook/sam-3d-body-dinov3",
        token=os.environ.get("HF_TOKEN")
    )
    from sam_3d_body import load_sam_3d_body, SAM3DBodyEstimator
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model, model_cfg = load_sam_3d_body(
        checkpoint_path=f"{checkpoint_dir}/model.ckpt",
        device=device,
        mhr_path=f"{checkpoint_dir}/assets/mhr_model.pt"
    )
    MODEL = SAM3DBodyEstimator(
        sam_3d_body_model=model,
        model_cfg=model_cfg,
    )
    FACES = MODEL.faces
    print("✓ Model loaded")
    return MODEL, FACES
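# Note (assumption): the facebook/sam-3d-body-dinov3 checkpoint repo may be gated,
# which is why HF_TOKEN is passed to login() and snapshot_download() above. An
# optional pre-flight access check could look like this (auth_check raises if the
# token cannot access the repo):
#
#   from huggingface_hub import auth_check
#   auth_check("facebook/sam-3d-body-dinov3", token=os.environ.get("HF_TOKEN"))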
@spaces.GPU(duration=120)
def reconstruct_body(image: np.ndarray) -> tuple:
    """
    Reconstruct 3D body mesh from image.

    Args:
        image: Input RGB image

    Returns:
        tuple: (glb_path, status)
    """
    if image is None:
        return None, "❌ No image provided"
    try:
        import torch
        import trimesh
        estimator, faces = load_model()
        # Process image
        if isinstance(image, Image.Image):
            image = np.array(image)
        # BGR for OpenCV
        import cv2
        img_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        outputs = estimator.process_one_image(img_bgr, bbox_thr=0.5)
        if not outputs:
            return None, "⚠️ No humans detected"
        # Export first person as GLB via trimesh
        person = outputs[0]
        vertices = person["pred_vertices"]
        # Create trimesh mesh
        mesh = trimesh.Trimesh(vertices=vertices, faces=faces)
        # Rotate 180° around X-axis to fix upside-down orientation
        rotation = trimesh.transformations.rotation_matrix(np.pi, [1, 0, 0])
        mesh.apply_transform(rotation)
        # Save GLB
        output_dir = tempfile.mkdtemp()
        glb_path = f"{output_dir}/body_{uuid.uuid4().hex[:8]}.glb"
        mesh.export(glb_path, file_type='glb')
        return glb_path, f"✓ Reconstructed {len(outputs)} person(s)"
    except Exception as e:
        import traceback
        traceback.print_exc()
        return None, f"❌ Error: {e}"
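# Local smoke test (sketch): a quick way to exercise reconstruct_body outside the
# Space, assuming a sample image "person.jpg" is available and that the spaces.GPU
# decorator is inert off ZeroGPU hardware.
#
#   from PIL import Image as PILImage
#   glb_path, status = reconstruct_body(np.array(PILImage.open("person.jpg").convert("RGB")))
#   print(status, glb_path)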
# Gradio Interface
with gr.Blocks(title="SAM 3D Body MCP") as demo:
    gr.Markdown("# 🧍 SAM 3D Body MCP Server\n**Image → 3D Human Mesh (GLB)**")
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label="Input Image", type="numpy")
            btn = gr.Button("🎯 Reconstruct", variant="primary")
        with gr.Column():
            output_model = gr.Model3D(label="3D Preview")
            output_file = gr.File(label="Download GLB")
            status = gr.Textbox(label="Status")
    btn.click(reconstruct_body, inputs=[input_image], outputs=[output_model, status])
    output_model.change(lambda x: x, inputs=[output_model], outputs=[output_file])
    gr.Markdown("""
---
### MCP Server
```json
{
  "mcpServers": {
    "sam3d-body": {
      "url": "https://dev-bjoern-sam3d-body-mcp.hf.space/gradio_api/mcp/sse"
    }
  }
}
```
""")
if __name__ == "__main__":
    demo.launch(mcp_server=True)