"""3D model conversion processor with LOD support."""
|
|
|
|
import logging
|
|
from pathlib import Path
|
|
from typing import Any
|
|
|
|
import numpy as np
|
|
import trimesh
|
|
|
|
from ..config import settings
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
# LOD configuration: level -> face ratio (for non-STEP files)
|
|
LOD_LEVELS = {
|
|
0: 1.0, # LOD0: 100% faces (original)
|
|
1: 0.5, # LOD1: 50% faces
|
|
2: 0.25, # LOD2: 25% faces
|
|
}
|
|
|
|
# LOD tessellation parameters for STEP files (cascadio)
|
|
# Higher values = coarser mesh = fewer triangles
|
|
LOD_TESSELLATION = {
|
|
0: {'tol_linear': 0.01, 'tol_angular': 0.5}, # High quality (default)
|
|
1: {'tol_linear': 0.1, 'tol_angular': 1.0}, # Medium quality
|
|
2: {'tol_linear': 0.5, 'tol_angular': 2.0}, # Low quality (for preview)
|
|
}
|
|
|
|
|
|
def convert_to_glb(input_path: Path, output_path: Path, file_type: str) -> dict[str, Any]:
    """
    Convert a 3D model to GLB format.

    Supports STEP, STL, OBJ, and other formats via trimesh/cascadio.

    Returns metadata about the converted model.
    """
    file_type = file_type.lower()

    # Ensure the output directory exists
    output_path.parent.mkdir(parents=True, exist_ok=True)

    if file_type in ('step', 'stp'):
        return _convert_step(input_path, output_path)
    else:
        return _convert_with_trimesh(input_path, output_path, file_type)


def convert_to_glb_with_lod(input_path: Path, output_dir: Path, file_type: str, model_id: str) -> dict[str, Any]:
    """
    Convert a 3D model to GLB format with multiple LOD levels.

    For STEP files: generate each LOD directly from the source with different tessellation precision.
    For other files: generate LOD0, then simplify it for the remaining levels.

    Args:
        input_path: Path to the input file
        output_dir: Directory to save LOD files
        file_type: File extension (step, stl, obj, etc.)
        model_id: Unique model identifier used for file naming

    Returns:
        Metadata including LOD file paths and statistics
    """
    file_type = file_type.lower()
    output_dir.mkdir(parents=True, exist_ok=True)

    # STEP files: generate each LOD with different tessellation precision
    if file_type in ('step', 'stp'):
        return _convert_step_with_lod(input_path, output_dir, model_id)

    # Non-STEP files: use post-processing simplification
    return _convert_other_with_lod(input_path, output_dir, file_type, model_id)


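# Illustrative call to convert_to_glb_with_lod (the path and model id below are
# placeholders for this sketch, not values taken from the pipeline):
#
#     meta = convert_to_glb_with_lod(
#         Path("/data/uploads/bracket.step"), Path("/data/converted"), "step", "bracket-123",
#     )
#     meta["lod_files"]   # {"lod0": "bracket-123_lod0.glb", "lod1": "...", "lod2": "..."}
#     meta["lod_levels"]  # number of distinct LOD files actually written

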
def _convert_step_with_lod(input_path: Path, output_dir: Path, model_id: str) -> dict[str, Any]:
    """
    Convert a STEP file to GLB with multiple LOD levels using different tessellation precision.

    This is more effective than post-processing simplification because it controls
    mesh generation at the source.
    """
    lod_files = {}
    metadata = None

    for level, params in LOD_TESSELLATION.items():
        lod_path = output_dir / f"{model_id}_lod{level}.glb"

        try:
            level_metadata = _convert_step(
                input_path,
                lod_path,
                tol_linear=params['tol_linear'],
                tol_angular=params['tol_angular'],
            )

            lod_files[f'lod{level}'] = str(lod_path.name)
            faces = level_metadata.get('faces', 0)
            logger.info(f"Generated LOD{level} with {faces:,} faces (tol_linear={params['tol_linear']})")

            # Use the LOD0 metadata as the primary metadata
            if level == 0:
                metadata = level_metadata

        except Exception as e:
            logger.error(f"Failed to generate LOD{level}: {e}")
            # Fall back to LOD0 if available
            if 'lod0' in lod_files:
                lod_files[f'lod{level}'] = lod_files['lod0']

    # If LOD0 failed, raise an error
    if metadata is None:
        raise RuntimeError("Failed to convert STEP file")

    # Add LOD info to the metadata
    metadata['lod_files'] = lod_files
    metadata['lod_levels'] = len(set(lod_files.values()))

    return metadata


def _convert_other_with_lod(input_path: Path, output_dir: Path, file_type: str, model_id: str) -> dict[str, Any]:
    """
    Convert non-STEP files to GLB with LOD using post-processing simplification.
    """
    # LOD0 path (original quality)
    lod0_path = output_dir / f"{model_id}_lod0.glb"

    # Convert to LOD0
    metadata = _convert_with_trimesh(input_path, lod0_path, file_type)

    lod_files = {
        'lod0': str(lod0_path.name),
    }

    # Get the face count for the LOD generation decision
    total_faces = metadata.get('faces', 0)

    # Only generate LODs if the model has enough faces
    if total_faces > 1000:
        try:
            # Generate LOD1 and LOD2 using mesh simplification
            for level in [1, 2]:
                lod_path = output_dir / f"{model_id}_lod{level}.glb"
                ratio = LOD_LEVELS[level]

                # Reload the mesh fresh for each LOD level
                mesh = trimesh.load(str(lod0_path))

                simplified = _simplify_mesh(mesh, ratio)
                if simplified is not None:
                    simplified.export(str(lod_path), file_type='glb')
                    lod_files[f'lod{level}'] = str(lod_path.name)
                    logger.info(f"Generated LOD{level} with {ratio*100:.0f}% faces: {lod_path.name}")
                else:
                    logger.warning(f"Failed to generate LOD{level}, using LOD0")
                    lod_files[f'lod{level}'] = lod_files['lod0']

        except Exception as e:
            logger.warning(f"LOD generation failed: {e}, using LOD0 for all levels")
            lod_files['lod1'] = lod_files['lod0']
            lod_files['lod2'] = lod_files['lod0']
    else:
        # Small model: use LOD0 for all levels
        logger.info(f"Model has {total_faces} faces, skipping LOD generation")
        lod_files['lod1'] = lod_files['lod0']
        lod_files['lod2'] = lod_files['lod0']

    # Add LOD info to the metadata
    metadata['lod_files'] = lod_files
    metadata['lod_levels'] = len(set(lod_files.values()))

    return metadata


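# For orientation (derived from the LOD_LEVELS ratios above, not measured): a
# 200,000-face mesh targets roughly 100,000 faces at LOD1 and 50,000 at LOD2,
# while models at or below the 1,000-face threshold reuse the LOD0 file for
# every level instead of being simplified.

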
def _simplify_mesh(mesh: trimesh.Trimesh | trimesh.Scene, ratio: float) -> trimesh.Trimesh | trimesh.Scene | None:
    """
    Simplify a mesh or scene to the target face ratio.

    Args:
        mesh: Trimesh mesh or scene
        ratio: Target ratio of faces (0.0 - 1.0)

    Returns:
        Simplified mesh/scene, or None on failure
    """
    # Minimum reduction required (at least a 10% reduction for fast_simplification to work)
    MIN_REDUCTION_RATIO = 0.9

    try:
        if isinstance(mesh, trimesh.Scene):
            # Simplify each geometry in the scene
            simplified_geometries = {}
            for name, geom in mesh.geometry.items():
                # Skip small geometries and non-mesh objects
                if not hasattr(geom, 'faces') or len(geom.faces) < 100:
                    simplified_geometries[name] = geom
                    continue

                original_faces = len(geom.faces)
                target_faces = max(int(original_faces * ratio), 4)

                # Only simplify if we're reducing by at least 10%
                # (fast_simplification requires reduction > 0)
                if target_faces < original_faces * MIN_REDUCTION_RATIO:
                    try:
                        simplified = geom.simplify_quadric_decimation(target_faces)
                        simplified_geometries[name] = simplified
                    except Exception as e:
                        logger.warning(f"Failed to simplify geometry {name}: {e}")
                        simplified_geometries[name] = geom
                else:
                    # Reduction too small, skip simplification
                    simplified_geometries[name] = geom

            # Create a new scene with the simplified geometries
            new_scene = trimesh.Scene()
            for name, geom in simplified_geometries.items():
                try:
                    # Get the original transform if one exists
                    node_name = None
                    if hasattr(mesh.graph, 'nodes_geometry'):
                        for item in mesh.graph.nodes_geometry:
                            # Handle both tuple formats: (node, geom_name) or (node, geom_name, ...)
                            if len(item) >= 2 and item[1] == name:
                                node_name = item[0]
                                break

                    if node_name:
                        transform = mesh.graph.get(node_name)[0]
                        new_scene.add_geometry(geom, node_name=node_name, geom_name=name, transform=transform)
                    else:
                        new_scene.add_geometry(geom, geom_name=name)
                except Exception as e:
                    # If the transform lookup fails, add the geometry without a transform
                    logger.debug(f"Could not get transform for {name}: {e}")
                    new_scene.add_geometry(geom, geom_name=name)

            return new_scene

        elif hasattr(mesh, 'faces') and len(mesh.faces) >= 100:
            # Single-mesh simplification
            original_faces = len(mesh.faces)
            target_faces = max(int(original_faces * ratio), 4)

            # Only simplify if we're reducing by at least 10%
            if target_faces < original_faces * MIN_REDUCTION_RATIO:
                return mesh.simplify_quadric_decimation(target_faces)

        return mesh

    except Exception as e:
        logger.error(f"Mesh simplification failed: {e}")
        return None


def _convert_step(
    input_path: Path,
    output_path: Path,
    tol_linear: float = 0.01,
    tol_angular: float = 0.5,
) -> dict[str, Any]:
    """Convert a STEP file using cascadio with configurable tessellation precision.

    Args:
        input_path: Path to the STEP file
        output_path: Path to save the GLB file
        tol_linear: Linear deflection tolerance (higher = coarser mesh)
        tol_angular: Angular deflection tolerance in radians (higher = coarser mesh)

    Returns:
        Metadata about the converted model
    """
    try:
        import cascadio

        logger.info(f"Converting STEP file with cascadio: {input_path}")
        logger.info(f"Tessellation params: tol_linear={tol_linear}, tol_angular={tol_angular}")

        cascadio.step_to_glb(
            str(input_path),
            str(output_path),
            tol_linear=tol_linear,
            tol_angular=tol_angular,
        )

        # Load the result to get metadata
        mesh = trimesh.load(str(output_path))
        return _extract_metadata(mesh)

    except ImportError:
        logger.error("cascadio not installed, cannot convert STEP files")
        raise RuntimeError("STEP conversion requires cascadio package")
    except Exception as e:
        logger.error(f"STEP conversion failed: {e}")
        raise


def _convert_with_trimesh(input_path: Path, output_path: Path, file_type: str) -> dict[str, Any]:
    """Convert STL, OBJ, and other formats using trimesh."""
    logger.info(f"Converting {file_type.upper()} file with trimesh: {input_path}")

    try:
        # Load the mesh
        mesh = trimesh.load(str(input_path))

        # Export to GLB
        mesh.export(str(output_path), file_type='glb')

        return _extract_metadata(mesh)

    except Exception as e:
        logger.error(f"Trimesh conversion failed: {e}")
        raise


def _extract_metadata(mesh: trimesh.Trimesh | trimesh.Scene) -> dict[str, Any]:
    """Extract metadata from a trimesh object."""
    metadata: dict[str, Any] = {}

    try:
        if isinstance(mesh, trimesh.Scene):
            # Scene with multiple meshes
            metadata['type'] = 'scene'
            metadata['parts_count'] = len(mesh.geometry)

            # Aggregate stats
            total_vertices = 0
            total_faces = 0

            for name, geom in mesh.geometry.items():
                if hasattr(geom, 'vertices'):
                    total_vertices += len(geom.vertices)
                if hasattr(geom, 'faces'):
                    total_faces += len(geom.faces)

            metadata['vertices'] = total_vertices
            metadata['faces'] = total_faces

            # Bounding box
            if hasattr(mesh, 'bounds') and mesh.bounds is not None:
                bounds = mesh.bounds
                metadata['bounding_box'] = {
                    'min': {'x': float(bounds[0][0]), 'y': float(bounds[0][1]), 'z': float(bounds[0][2])},
                    'max': {'x': float(bounds[1][0]), 'y': float(bounds[1][1]), 'z': float(bounds[1][2])},
                }

            # Parts info
            parts = []
            for name, geom in mesh.geometry.items():
                part_info = {'name': name}

                if hasattr(geom, 'bounds') and geom.bounds is not None:
                    part_bounds = geom.bounds
                    part_info['bounding_box'] = {
                        'min': {'x': float(part_bounds[0][0]), 'y': float(part_bounds[0][1]), 'z': float(part_bounds[0][2])},
                        'max': {'x': float(part_bounds[1][0]), 'y': float(part_bounds[1][1]), 'z': float(part_bounds[1][2])},
                    }
                    part_info['center_point'] = {
                        'x': float((part_bounds[0][0] + part_bounds[1][0]) / 2),
                        'y': float((part_bounds[0][1] + part_bounds[1][1]) / 2),
                        'z': float((part_bounds[0][2] + part_bounds[1][2]) / 2),
                    }

                parts.append(part_info)

            metadata['parts'] = parts

        else:
            # Single mesh
            metadata['type'] = 'mesh'
            metadata['parts_count'] = 1

            if hasattr(mesh, 'vertices'):
                metadata['vertices'] = len(mesh.vertices)
            if hasattr(mesh, 'faces'):
                metadata['faces'] = len(mesh.faces)

            if hasattr(mesh, 'bounds') and mesh.bounds is not None:
                bounds = mesh.bounds
                metadata['bounding_box'] = {
                    'min': {'x': float(bounds[0][0]), 'y': float(bounds[0][1]), 'z': float(bounds[0][2])},
                    'max': {'x': float(bounds[1][0]), 'y': float(bounds[1][1]), 'z': float(bounds[1][2])},
                }

    except Exception as e:
        logger.warning(f"Error extracting metadata: {e}")

    return metadata
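

# Minimal manual smoke test; an illustrative sketch, not part of the processing
# pipeline. The sample path and model id are placeholders. Run it as a module
# (python -m <package>.<this_module>) so the relative import above resolves.
if __name__ == "__main__":
    import sys

    logging.basicConfig(level=logging.INFO)

    source = Path(sys.argv[1]) if len(sys.argv) > 1 else Path("example.step")
    result = convert_to_glb_with_lod(
        input_path=source,
        output_dir=Path("./converted"),
        file_type=source.suffix.lstrip('.') or 'step',
        model_id="example-model",
    )
    logger.info("LOD files: %s", result.get("lod_files"))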