evaluation

BaseEvaluationModel(config)

Bases: ABC

Base interface for all evaluation models.

Source code in quadra/models/evaluation.py
def __init__(self, config: DictConfig) -> None:
    self.model: Any
    self.model_path: str | None
    self.device: str
    self.config = config

training: bool property

Return whether model is in training mode.

cpu() abstractmethod

Move model to cpu.

Source code in quadra/models/evaluation.py
@abstractmethod
def cpu(self):
    """Move model to cpu."""

eval() abstractmethod

Set model to evaluation mode.

Source code in quadra/models/evaluation.py
@abstractmethod
def eval(self):
    """Set model to evaluation mode."""

half() abstractmethod

Convert model to half precision.

Source code in quadra/models/evaluation.py
@abstractmethod
def half(self):
    """Convert model to half precision."""

load_from_disk(model_path, device='cpu') abstractmethod

Load model from disk.

Source code in quadra/models/evaluation.py
@abstractmethod
def load_from_disk(self, model_path: str, device: str = "cpu"):
    """Load model from disk."""

to(device) abstractmethod

Move model to device.

Source code in quadra/models/evaluation.py
@abstractmethod
def to(self, device: str):
    """Move model to device."""

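Concrete wrappers are expected to implement every abstract method above. Below is a minimal, illustrative sketch of a custom subclass; the class name and the pickle-based loading are assumptions made for the example and are not part of quadra:

import torch

from quadra.models.evaluation import BaseEvaluationModel


class PickledTorchEvaluationModel(BaseEvaluationModel):
    """Illustrative wrapper around a model saved with torch.save(model, path)."""

    def load_from_disk(self, model_path: str, device: str = "cpu"):
        self.model_path = model_path
        self.device = device
        # load the full pickled module and put it in evaluation mode
        self.model = torch.load(model_path, map_location=device)
        self.model.eval()

    def to(self, device: str):
        self.model.to(device)
        self.device = device

    def eval(self):
        self.model.eval()

    def half(self):
        self.model.half()

    def cpu(self):
        self.to("cpu")
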
ONNXEvaluationModel(config)

Bases: BaseEvaluationModel

Wrapper for ONNX models. It's designed to provide a similar interface to standard torch models.

Source code in quadra/models/evaluation.py
def __init__(self, config: DictConfig) -> None:
    if not ONNX_AVAILABLE:
        raise ImportError(
            "onnxruntime is not installed. Please install ONNX capabilities for quadra with: pip install .[onnx]"
        )
    super().__init__(config=config)
    self.session_options = self.generate_session_options()

__call__(*inputs)

Run inference on the model and return the output as torch tensors.

Source code in quadra/models/evaluation.py
def __call__(self, *inputs: np.ndarray | torch.Tensor) -> Any:
    """Run inference on the model and return the output as torch tensors."""
    # TODO: Maybe we can support also kwargs
    use_pytorch = False

    onnx_inputs: dict[str, np.ndarray | torch.Tensor] = {}

    for onnx_input, current_input in zip(self.model.get_inputs(), inputs):
        if isinstance(current_input, torch.Tensor):
            onnx_inputs[onnx_input.name] = current_input
            use_pytorch = True
        elif isinstance(current_input, np.ndarray):
            onnx_inputs[onnx_input.name] = current_input
        else:
            raise ValueError(f"Invalid input type: {type(inputs)}")

        if use_pytorch and isinstance(current_input, np.ndarray):
            raise ValueError("Cannot mix torch and numpy inputs")

    if use_pytorch:
        onnx_output = self._forward_from_pytorch(cast(dict[str, torch.Tensor], onnx_inputs))
    else:
        onnx_output = self._forward_from_numpy(cast(dict[str, np.ndarray], onnx_inputs))

    onnx_output = [torch.from_numpy(x).to(self.device) if isinstance(x, np.ndarray) else x for x in onnx_output]

    if len(onnx_output) == 1:
        onnx_output = onnx_output[0]

    return onnx_output

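As a usage sketch, the wrapper can be constructed from a config, pointed at an exported ONNX file, and called like a torch model. The model path and input shape below are placeholders, and the example assumes onnxruntime is installed:

import numpy as np
from omegaconf import OmegaConf

from quadra.models.evaluation import ONNXEvaluationModel

config = OmegaConf.create({"session_options": None})  # no custom session options

model = ONNXEvaluationModel(config=config)
model.load_from_disk("model.onnx", device="cpu")  # placeholder path

# numpy (or torch) inputs are accepted; outputs are returned as torch tensors
batch = np.random.rand(1, 3, 224, 224).astype(np.float32)
prediction = model(batch)

Note that numpy and torch inputs cannot be mixed within a single call.
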
cpu()

Move model to cpu.

Source code in quadra/models/evaluation.py
def cpu(self):
    """Move model to cpu."""
    self.to("cpu")

eval()

Fake interface to match torch models.

Source code in quadra/models/evaluation.py
def eval(self):
    """Fake interface to match torch models."""

generate_session_options()

Generate session options from the current config.

Source code in quadra/models/evaluation.py
def generate_session_options(self) -> ort.SessionOptions:
    """Generate session options from the current config."""
    session_options = ort.SessionOptions()

    if hasattr(self.config, "session_options") and self.config.session_options is not None:
        session_options_dict = cast(
            dict[str, Any], OmegaConf.to_container(self.config.session_options, resolve=True)
        )
        for key, value in session_options_dict.items():
            if isinstance(value, dict) and "_target_" in value:
                value = instantiate(value)

            setattr(session_options, key, value)

    return session_options

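For illustration, a config shaped like the one below sets plain values directly as attributes of ort.SessionOptions, while any dictionary value containing a _target_ key is first built with Hydra's instantiate before being assigned; the specific option values are only examples:

from omegaconf import OmegaConf

config = OmegaConf.create(
    {
        "session_options": {
            # assigned via setattr on ort.SessionOptions
            "intra_op_num_threads": 4,
            "enable_profiling": False,
            # a value such as {"_target_": "...", ...} would instead be
            # instantiated with hydra before being set
        }
    }
)
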
half()

Convert model to half precision.

Source code in quadra/models/evaluation.py
def half(self):
    """Convert model to half precision."""
    raise NotImplementedError("At the moment ONNX models do not support half method.")

load_from_disk(model_path, device='cpu')

Load model from disk.

Source code in quadra/models/evaluation.py
def load_from_disk(self, model_path: str, device: str = "cpu"):
    """Load model from disk."""
    self.model_path = model_path
    self.device = device

    ort_providers = self._get_providers(device)
    self.model = ort.InferenceSession(self.model_path, providers=ort_providers, sess_options=self.session_options)

to(device)

Move model to device.

Source code in quadra/models/evaluation.py
def to(self, device: str):
    """Move model to device."""
    self.device = device
    ort_providers = self._get_providers(device)
    self.model.set_providers(ort_providers)

TorchEvaluationModel(config, model_architecture)

Bases: TorchscriptEvaluationModel

Wrapper for torch models.

Parameters:

  • model_architecture (Module) –

    Torch model architecture to wrap

Source code in quadra/models/evaluation.py
def __init__(self, config: DictConfig, model_architecture: nn.Module) -> None:
    super().__init__(config=config)
    self.model = model_architecture
    self.model.eval()
    device = next(self.model.parameters()).device
    self.device = str(device)

load_from_disk(model_path, device='cpu')

Load model from disk.

Source code in quadra/models/evaluation.py
def load_from_disk(self, model_path: str, device: str = "cpu"):
    """Load model from disk."""
    self.model_path = model_path
    self.device = device
    self.model.load_state_dict(torch.load(self.model_path))
    self.model.eval()
    self.model.to(self.device)

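A usage sketch, assuming a checkpoint saved as a plain state_dict for the same architecture; the tiny nn.Sequential and the checkpoint path are placeholders:

import torch
from torch import nn
from omegaconf import OmegaConf

from quadra.models.evaluation import TorchEvaluationModel

# placeholder architecture; any nn.Module with a matching checkpoint works
architecture = nn.Sequential(nn.Linear(16, 8), nn.ReLU(), nn.Linear(8, 2))

model = TorchEvaluationModel(config=OmegaConf.create({}), model_architecture=architecture)
model.load_from_disk("checkpoint.pth", device="cpu")  # expects a state_dict file

# the wrapped module stays accessible as model.model
prediction = model.model(torch.randn(1, 16))
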
TorchscriptEvaluationModel

Bases: BaseEvaluationModel

Wrapper for torchscript models.

training: bool property

Return whether model is in training mode.

cpu()

Move model to cpu.

Source code in quadra/models/evaluation.py
def cpu(self):
    """Move model to cpu."""
    self.model.cpu()

eval()

Set model to evaluation mode.

Source code in quadra/models/evaluation.py
def eval(self):
    """Set model to evaluation mode."""
    self.model.eval()

half()

Convert model to half precision.

Source code in quadra/models/evaluation.py
def half(self):
    """Convert model to half precision."""
    self.model.half()

load_from_disk(model_path, device='cpu')

Load model from disk.

Source code in quadra/models/evaluation.py
def load_from_disk(self, model_path: str, device: str = "cpu"):
    """Load model from disk."""
    self.model_path = model_path
    self.device = device

    model = cast(RecursiveScriptModule, torch.jit.load(self.model_path))
    model.eval()
    model.to(self.device)

    self.model = model

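A usage sketch for loading a file exported with torch.jit.script or torch.jit.trace; the path is a placeholder and the config-taking constructor is assumed to be the one inherited from BaseEvaluationModel:

from omegaconf import OmegaConf

from quadra.models.evaluation import TorchscriptEvaluationModel

model = TorchscriptEvaluationModel(config=OmegaConf.create({}))
model.load_from_disk("model.torchscript.pt", device="cpu")  # placeholder path

model.half()        # torchscript modules support half precision
model.to("cuda:0")  # move to a GPU if one is available
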
to(device)

Move model to device.

Source code in quadra/models/evaluation.py
def to(self, device: str):
    """Move model to device."""
    self.model.to(device)
    self.device = device