File size: 1,553 Bytes
590a604
 
 
 
 
 
 
 
ee1a8a3
1fbc47b
 
 
 
 
 
 
 
a18e93d
1fbc47b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
"""
FastAPI dependency providers for LexiMind.

Manages lazy initialization and caching of the inference pipeline.

Author: Oliver Perrin
Date: December 2025
"""

from __future__ import annotations

from functools import lru_cache
from pathlib import Path

from fastapi import HTTPException, status

from ..inference.factory import create_inference_pipeline
from ..inference.pipeline import InferencePipeline
from ..utils.logging import get_logger

# Module-level logger, created after all imports per PEP 8 grouping
# (stdlib / third-party / local, then module constants).
logger = get_logger(__name__)


@lru_cache(maxsize=1)
def get_pipeline() -> InferencePipeline:
    """Lazily construct and cache the inference pipeline for the API.

    The pipeline is built on first request and memoized for the process
    lifetime via ``lru_cache``. Note that ``lru_cache`` does not cache
    raised exceptions, so a failed initialization is retried on the next
    call.

    Returns:
        The shared :class:`InferencePipeline` instance.

    Raises:
        HTTPException: 503 when the pipeline cannot be initialized
            (missing artifact files or any other construction failure).
    """
    # Artifact locations are resolved relative to the process working
    # directory — TODO(review): consider making these configurable.
    checkpoint = Path("checkpoints/best.pt")
    labels = Path("artifacts/labels.json")
    model_config = Path("configs/model/base.yaml")

    try:
        pipeline, _ = create_inference_pipeline(
            checkpoint_path=checkpoint,
            labels_path=labels,
            model_config_path=model_config,
        )
    except Exception as exc:  # noqa: BLE001 - surface initialization issues to the caller
        # Log a more specific message for the common "artifact missing"
        # case, but raise the same opaque 503 either way so internal
        # details never leak to API clients.
        if isinstance(exc, FileNotFoundError):
            logger.exception("Pipeline initialization failed: missing artifact")
        else:
            logger.exception("Pipeline initialization failed")
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Service temporarily unavailable",
        ) from exc
    return pipeline