From 9d29d8f147487e4494f5a3ce0165fba6d6aa52ca Mon Sep 17 00:00:00 2001 From: rozyczko Date: Mon, 13 Apr 2026 13:59:25 +0200 Subject: [PATCH 01/11] initial commit --- src/easyscience/fitting/fitter.py | 2 + .../fitting/minimizers/minimizer_base.py | 1 + .../fitting/minimizers/minimizer_bumps.py | 1 + .../fitting/minimizers/minimizer_dfo.py | 1 + .../fitting/minimizers/minimizer_lmfit.py | 58 ++++++++- src/easyscience/fitting/minimizers/utils.py | 5 + src/easyscience/fitting/multi_fitter.py | 4 + tests/conftest.py | 15 +++ .../fitting/minimizers/test_minimizer_base.py | 4 + .../minimizers/test_minimizer_lmfit.py | 111 +++++++++++++++--- tests/unit/fitting/test_fitter.py | 23 ++++ tests/unit/fitting/test_multi_fitter.py | 63 ++++++++++ 12 files changed, 265 insertions(+), 23 deletions(-) create mode 100644 tests/conftest.py create mode 100644 tests/unit/fitting/test_multi_fitter.py diff --git a/src/easyscience/fitting/fitter.py b/src/easyscience/fitting/fitter.py index 70524996..823b6cf9 100644 --- a/src/easyscience/fitting/fitter.py +++ b/src/easyscience/fitting/fitter.py @@ -209,6 +209,7 @@ def inner_fit_callable( y: np.ndarray, weights: Optional[np.ndarray] = None, vectorized: bool = False, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, **kwargs, ) -> FitResults: """This is a wrapped callable which performs the actual @@ -237,6 +238,7 @@ def inner_fit_callable( weights=weights, tolerance=self._tolerance, max_evaluations=self._max_evaluations, + progress_callback=progress_callback, **kwargs, ) diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index b4539ac4..4182248d 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -69,6 +69,7 @@ def fit( method: Optional[str] = None, tolerance: Optional[float] = None, max_evaluations: Optional[int] = None, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, **kwargs, ) -> FitResults: """Perform a fit using the engine. diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 37f8873e..d3ffa7e3 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -75,6 +75,7 @@ def fit( method: Optional[str] = None, tolerance: Optional[float] = None, max_evaluations: Optional[int] = None, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, minimizer_kwargs: Optional[dict] = None, engine_kwargs: Optional[dict] = None, **kwargs, diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index a480c823..696d8db9 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -64,6 +64,7 @@ def fit( method: str = None, tolerance: Optional[float] = None, max_evaluations: Optional[int] = None, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, **kwargs, ) -> FitResults: """Perform a fit using the DFO-ls engine. 
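A minimal sketch of how the new `progress_callback` keyword is intended to be used from the caller's side, before the per-minimizer diffs below. The callback is forwarded unchanged from `Fitter.fit` to the active minimizer, which invokes it with a payload dict and treats an explicit `False` return value as a cancellation request. The names `model`, `x` and `y` are illustrative placeholders, not part of this patch:

from easyscience.fitting.fitter import Fitter

def on_progress(payload: dict) -> bool:
    # payload carries 'iteration', 'chi2', 'reduced_chi2',
    # 'parameter_values', 'refresh_plots' and 'finished'
    print(f"iteration {payload['iteration']}: chi2 = {payload['chi2']:.4g}")
    # an explicit False return asks the minimizer to stop; it then raises FitCancelled
    return payload['iteration'] < 100

# 'model', 'x' and 'y' are placeholders (e.g. the AbsSin helper from the
# integration tests and matching numpy arrays)
fitter = Fitter(model, model)
result = fitter.fit(x=x, y=y, progress_callback=on_progress)
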
diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 4a8104b2..7cd0af49 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -19,6 +19,7 @@ from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase from .utils import FitError +from .utils import FitCancelled from .utils import FitResults @@ -86,6 +87,7 @@ def fit( method: Optional[str] = None, tolerance: Optional[float] = None, max_evaluations: Optional[int] = None, + progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, minimizer_kwargs: Optional[dict] = None, engine_kwargs: Optional[dict] = None, **kwargs, @@ -144,11 +146,13 @@ def fit( if model is None: model = self._make_model() + iter_cb = self._create_iter_callback(progress_callback) model_results = model.fit( y, x=x, weights=weights, max_nfev=max_evaluations, + iter_cb=iter_cb, fit_kws=fit_kws_dict, **method_kwargs, **engine_kwargs, @@ -156,15 +160,59 @@ def fit( ) self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results) + except FitCancelled: + self._restore_parameter_values() + raise except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) return results - def _get_fit_kws( - self, method: str, tolerance: float, minimizer_kwargs: dict[str:str] - ) -> dict[str:str]: + def _create_iter_callback( + self, + progress_callback: Optional[Callable[[dict], Optional[bool]]], + ) -> Optional[Callable]: + if progress_callback is None: + return None + + def iter_cb(params, iteration: int, residuals: np.ndarray, *args, **kwargs) -> bool: + payload = self._build_progress_payload(params, iteration, residuals) + should_continue = progress_callback(payload) + if should_continue is False: + raise FitCancelled() + return False + + return iter_cb + + def _restore_parameter_values(self) -> None: + for key in self._cached_pars.keys(): + self._cached_pars[key].value = self._cached_pars_vals[key][0] + + def _build_progress_payload(self, params, iteration: int, residuals: np.ndarray) -> dict: + residual_array = np.asarray(residuals) + chi2 = float(np.square(residual_array).sum()) + varied_parameter_count = sum(1 for parameter in params.values() if getattr(parameter, 'vary', False)) + degrees_of_freedom = residual_array.size - varied_parameter_count + reduced_chi2 = chi2 / degrees_of_freedom if degrees_of_freedom > 0 else chi2 + + parameter_values = {} + for parameter_name, parameter in self._cached_pars.items(): + lmfit_parameter_name = f'{MINIMIZER_PARAMETER_PREFIX}{parameter_name}' + if lmfit_parameter_name in params: + parameter_values[parameter_name] = float(params[lmfit_parameter_name].value) + else: + parameter_values[parameter_name] = float(parameter.value) + + return { + 'iteration': int(iteration), + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': parameter_values, + 'refresh_plots': False, + 'finished': False, + } + + def _get_fit_kws(self, method: str, tolerance: float, minimizer_kwargs: dict[str:str]) -> dict[str:str]: if minimizer_kwargs is None: minimizer_kwargs = {} if tolerance is not None: diff --git a/src/easyscience/fitting/minimizers/utils.py b/src/easyscience/fitting/minimizers/utils.py index 76449a17..23226706 100644 --- a/src/easyscience/fitting/minimizers/utils.py +++ 
b/src/easyscience/fitting/minimizers/utils.py @@ -64,3 +64,8 @@ def __str__(self) -> str: if self.e is not None: s = f'{self.e}\n' return s + 'Something has gone wrong with the fit' + + +class FitCancelled(Exception): + def __str__(self) -> str: + return 'Fit cancelled by progress callback' diff --git a/src/easyscience/fitting/multi_fitter.py b/src/easyscience/fitting/multi_fitter.py index 94e715c6..b296cea1 100644 --- a/src/easyscience/fitting/multi_fitter.py +++ b/src/easyscience/fitting/multi_fitter.py @@ -18,6 +18,10 @@ class MultiFitter(Fitter): We can fit these types of data simultaneously: - Multiple models on multiple datasets. + + The inherited ``fit`` wrapper from ``Fitter`` is used unchanged, + including support for forwarding progress callbacks to the active + minimizer. """ def __init__( diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..e217f34e --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,15 @@ +# SPDX-FileCopyrightText: 2026 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause + +from __future__ import annotations + +import sys +from pathlib import Path + + +PROJECT_ROOT = Path(__file__).resolve().parents[1] +SRC_ROOT = PROJECT_ROOT / 'src' + +src_root_str = str(SRC_ROOT) +if src_root_str not in sys.path: + sys.path.insert(0, src_root_str) \ No newline at end of file diff --git a/tests/unit/fitting/minimizers/test_minimizer_base.py b/tests/unit/fitting/minimizers/test_minimizer_base.py index 86ec4292..2411998d 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_base.py +++ b/tests/unit/fitting/minimizers/test_minimizer_base.py @@ -10,6 +10,7 @@ from easyscience import Parameter from easyscience.fitting.minimizers.minimizer_base import MinimizerBase +from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -213,3 +214,6 @@ def test_get_method_dict_not_supported_method(self, minimizer: MinimizerBase) -> # Then Expect with pytest.raises(FitError): result = minimizer._get_method_kwargs('not_supported_method') + + def test_fit_cancelled_string(self) -> None: + assert str(FitCancelled()) == 'Fit cancelled by progress callback' diff --git a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py index ac280873..d9cbe944 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py +++ b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py @@ -10,6 +10,7 @@ import easyscience.fitting.minimizers.minimizer_lmfit from easyscience import Parameter from easyscience.fitting.minimizers.minimizer_lmfit import LMFit +from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -128,9 +129,7 @@ def test_fit(self, minimizer: LMFit) -> None: # Expect assert result == 'gen_fit_results' - mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='leastsq' - ) + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq') minimizer._make_model.assert_called_once_with() minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) minimizer._gen_fit_results.assert_called_once_with('fit') @@ -147,9 +146,7 @@ def test_fit_model(self, minimizer: LMFit) -> None: minimizer.fit(x=1.0, y=2.0, weights=1, model=mock_model) # Expect - mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='leastsq' - ) + 
mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq') minimizer._make_model.assert_not_called() def test_fit_method(self, minimizer: LMFit) -> None: @@ -166,9 +163,7 @@ def test_fit_method(self, minimizer: LMFit) -> None: minimizer.fit(x=1.0, y=2.0, weights=1, method='method_passed') # Expect - mock_model.fit.assert_called_once_with( - 2.0, x=1.0, weights=1, max_nfev=None, fit_kws={}, method='method_passed' - ) + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='method_passed') minimizer.supported_methods.assert_called_once_with() def test_fit_kwargs(self, minimizer: LMFit) -> None: @@ -189,15 +184,95 @@ def test_fit_kwargs(self, minimizer: LMFit) -> None: ) # Expect - mock_model.fit.assert_called_once_with( - 2.0, - x=1.0, - weights=1, - max_nfev=None, - fit_kws={'minimizer_key': 'minimizer_val'}, - method='leastsq', - engine_key='engine_val', - ) + mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={'minimizer_key': 'minimizer_val'}, method='leastsq', engine_key='engine_val') + + def test_fit_progress_callback(self, minimizer: LMFit) -> None: + # When + progress_callback = MagicMock(return_value=True) + mock_model = MagicMock() + mock_model.fit = MagicMock(return_value='fit') + minimizer._make_model = MagicMock(return_value=mock_model) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + # Then + minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=progress_callback) + + # Expect + assert mock_model.fit.call_count == 1 + iter_cb = mock_model.fit.call_args.kwargs['iter_cb'] + assert callable(iter_cb) + + def test_create_iter_callback_no_callback(self, minimizer: LMFit) -> None: + # When Then Expect + assert minimizer._create_iter_callback(None) is None + + def test_create_iter_callback_abort(self, minimizer: LMFit) -> None: + # When + progress_callback = MagicMock(return_value=False) + iter_cb = minimizer._create_iter_callback(progress_callback) + + # Then Expect + with pytest.raises(FitCancelled): + iter_cb(MagicMock(), 5, np.array([1.0, -2.0])) + progress_callback.assert_called_once() + + def test_fit_cancellation_restores_parameter_values(self, minimizer: LMFit) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + parameter = MagicMock(Parameter) + parameter.value = 10.0 + minimizer._cached_pars = {'alpha': parameter} + minimizer._cached_pars_vals = {'alpha': (1.0, None)} + + mock_model = MagicMock() + mock_lm_parameter = MagicMock() + mock_lm_parameter.value = 2.0 + mock_lm_parameter.vary = True + + def fit_side_effect(*args, **kwargs): + kwargs['iter_cb']({'palpha': mock_lm_parameter}, 1, np.array([1.0, -2.0])) + + mock_model.fit.side_effect = fit_side_effect + minimizer._make_model = MagicMock(return_value=mock_model) + + # Then Expect + with pytest.raises(FitCancelled): + minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=MagicMock(return_value=False)) + + assert parameter.value == 1.0 + + def test_build_progress_payload(self, minimizer: LMFit) -> None: + # When + parameter_a = MagicMock(Parameter) + parameter_a.value = 1.5 + parameter_b = MagicMock(Parameter) + parameter_b.value = 2.5 + minimizer._cached_pars = {'alpha': parameter_a, 'beta': parameter_b} + + mock_param_alpha = MagicMock() + mock_param_alpha.value = 1.0 + mock_param_alpha.vary = True + mock_param_beta = 
MagicMock() + mock_param_beta.value = 2.0 + mock_param_beta.vary = False + params = {'palpha': mock_param_alpha, 'pbeta': mock_param_beta} + + # Then + payload = minimizer._build_progress_payload(params, 7, np.array([3.0, 4.0])) + + # Expect + assert payload == { + 'iteration': 7, + 'chi2': 25.0, + 'reduced_chi2': 25.0, + 'parameter_values': {'alpha': 1.0, 'beta': 2.0}, + 'refresh_plots': False, + 'finished': False, + } def test_fit_exception(self, minimizer: LMFit) -> None: # When diff --git a/tests/unit/fitting/test_fitter.py b/tests/unit/fitting/test_fitter.py index 73f5a12a..4207d02f 100644 --- a/tests/unit/fitting/test_fitter.py +++ b/tests/unit/fitting/test_fitter.py @@ -212,6 +212,29 @@ def test_fit(self, fitter: Fitter): assert fitter._dependent_dims == 'dims' assert fitter._fit_function == self.mock_fit_function + def test_fit_progress_callback(self, fitter: Fitter): + # When + fitter._precompute_reshaping = MagicMock(return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims')) + fitter._fit_function_wrapper = MagicMock(return_value='wrapped_fit_function') + fitter._post_compute_reshaping = MagicMock(return_value='fit_result') + fitter._minimizer = MagicMock() + fitter._minimizer.fit = MagicMock(return_value='result') + progress_callback = MagicMock() + + # Then + result = fitter.fit('x', 'y', 'weights', 'vectorized', progress_callback=progress_callback) + + # Expect + assert result == 'fit_result' + fitter._minimizer.fit.assert_called_once_with( + 'x_fit', + 'y_new', + weights='weights', + tolerance=None, + max_evaluations=None, + progress_callback=progress_callback, + ) + def test_post_compute_reshaping(self, fitter: Fitter): # When fit_result = MagicMock() diff --git a/tests/unit/fitting/test_multi_fitter.py b/tests/unit/fitting/test_multi_fitter.py new file mode 100644 index 00000000..6e722c24 --- /dev/null +++ b/tests/unit/fitting/test_multi_fitter.py @@ -0,0 +1,63 @@ +# SPDX-FileCopyrightText: 2026 EasyScience contributors +# SPDX-License-Identifier: BSD-3-Clause + +from unittest.mock import MagicMock + +import pytest + +from easyscience import ObjBase +from easyscience import Parameter +from easyscience.fitting.fitter import Fitter +from easyscience.fitting.multi_fitter import MultiFitter + + +class Line(ObjBase): + m: Parameter + c: Parameter + + def __init__(self, m_val: float, c_val: float): + m = Parameter('m', m_val) + c = Parameter('c', c_val) + super().__init__('line', m=m, c=c) + + def __call__(self, x): + return self.m.value * x + self.c.value + + +class TestMultiFitter: + @pytest.fixture + def multi_fitter(self, monkeypatch): + monkeypatch.setattr(Fitter, '_update_minimizer', MagicMock()) + fit_object_1 = Line(1.0, 0.5) + fit_object_2 = Line(2.0, 1.5) + return MultiFitter([fit_object_1, fit_object_2], [fit_object_1, fit_object_2]) + + def test_fit_progress_callback(self, multi_fitter: MultiFitter): + # When + multi_fitter._precompute_reshaping = MagicMock( + return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims') + ) + multi_fitter._fit_function_wrapper = MagicMock(return_value='wrapped_fit_function') + multi_fitter._post_compute_reshaping = MagicMock(return_value='fit_result') + multi_fitter._minimizer = MagicMock() + multi_fitter._minimizer.fit = MagicMock(return_value='result') + progress_callback = MagicMock() + + # Then + result = multi_fitter.fit( + ['x_1', 'x_2'], + ['y_1', 'y_2'], + ['weights_1', 'weights_2'], + progress_callback=progress_callback, + ) + + # Expect + assert result == 'fit_result' + multi_fitter._minimizer.fit.assert_called_once_with( + 
'x_fit', + 'y_new', + weights='weights', + tolerance=None, + max_evaluations=None, + progress_callback=progress_callback, + ) \ No newline at end of file From 2f66a466807b977a46e6e85911062a9c8b404aaa Mon Sep 17 00:00:00 2001 From: rozyczko Date: Mon, 13 Apr 2026 14:00:14 +0200 Subject: [PATCH 02/11] ruff --- .../fitting/minimizers/minimizer_lmfit.py | 10 +++++--- tests/conftest.py | 3 +-- .../minimizers/test_minimizer_lmfit.py | 23 +++++++++++++++---- tests/unit/fitting/test_fitter.py | 4 +++- tests/unit/fitting/test_multi_fitter.py | 2 +- 5 files changed, 31 insertions(+), 11 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 7cd0af49..28938323 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -18,8 +18,8 @@ from ..available_minimizers import AvailableMinimizers from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase -from .utils import FitError from .utils import FitCancelled +from .utils import FitError from .utils import FitResults @@ -191,7 +191,9 @@ def _restore_parameter_values(self) -> None: def _build_progress_payload(self, params, iteration: int, residuals: np.ndarray) -> dict: residual_array = np.asarray(residuals) chi2 = float(np.square(residual_array).sum()) - varied_parameter_count = sum(1 for parameter in params.values() if getattr(parameter, 'vary', False)) + varied_parameter_count = sum( + 1 for parameter in params.values() if getattr(parameter, 'vary', False) + ) degrees_of_freedom = residual_array.size - varied_parameter_count reduced_chi2 = chi2 / degrees_of_freedom if degrees_of_freedom > 0 else chi2 @@ -212,7 +214,9 @@ def _build_progress_payload(self, params, iteration: int, residuals: np.ndarray) 'finished': False, } - def _get_fit_kws(self, method: str, tolerance: float, minimizer_kwargs: dict[str:str]) -> dict[str:str]: + def _get_fit_kws( + self, method: str, tolerance: float, minimizer_kwargs: dict[str:str] + ) -> dict[str:str]: if minimizer_kwargs is None: minimizer_kwargs = {} if tolerance is not None: diff --git a/tests/conftest.py b/tests/conftest.py index e217f34e..2f9a6768 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,10 +6,9 @@ import sys from pathlib import Path - PROJECT_ROOT = Path(__file__).resolve().parents[1] SRC_ROOT = PROJECT_ROOT / 'src' src_root_str = str(SRC_ROOT) if src_root_str not in sys.path: - sys.path.insert(0, src_root_str) \ No newline at end of file + sys.path.insert(0, src_root_str) diff --git a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py index d9cbe944..82fd6648 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py +++ b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py @@ -129,7 +129,9 @@ def test_fit(self, minimizer: LMFit) -> None: # Expect assert result == 'gen_fit_results' - mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq') + mock_model.fit.assert_called_once_with( + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq' + ) minimizer._make_model.assert_called_once_with() minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) minimizer._gen_fit_results.assert_called_once_with('fit') @@ -146,7 +148,9 @@ def test_fit_model(self, minimizer: LMFit) -> None: minimizer.fit(x=1.0, y=2.0, weights=1, model=mock_model) # 
Expect - mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq') + mock_model.fit.assert_called_once_with( + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='leastsq' + ) minimizer._make_model.assert_not_called() def test_fit_method(self, minimizer: LMFit) -> None: @@ -163,7 +167,9 @@ def test_fit_method(self, minimizer: LMFit) -> None: minimizer.fit(x=1.0, y=2.0, weights=1, method='method_passed') # Expect - mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='method_passed') + mock_model.fit.assert_called_once_with( + 2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={}, method='method_passed' + ) minimizer.supported_methods.assert_called_once_with() def test_fit_kwargs(self, minimizer: LMFit) -> None: @@ -184,7 +190,16 @@ def test_fit_kwargs(self, minimizer: LMFit) -> None: ) # Expect - mock_model.fit.assert_called_once_with(2.0, x=1.0, weights=1, max_nfev=None, iter_cb=None, fit_kws={'minimizer_key': 'minimizer_val'}, method='leastsq', engine_key='engine_val') + mock_model.fit.assert_called_once_with( + 2.0, + x=1.0, + weights=1, + max_nfev=None, + iter_cb=None, + fit_kws={'minimizer_key': 'minimizer_val'}, + method='leastsq', + engine_key='engine_val', + ) def test_fit_progress_callback(self, minimizer: LMFit) -> None: # When diff --git a/tests/unit/fitting/test_fitter.py b/tests/unit/fitting/test_fitter.py index 4207d02f..702f5e59 100644 --- a/tests/unit/fitting/test_fitter.py +++ b/tests/unit/fitting/test_fitter.py @@ -214,7 +214,9 @@ def test_fit(self, fitter: Fitter): def test_fit_progress_callback(self, fitter: Fitter): # When - fitter._precompute_reshaping = MagicMock(return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims')) + fitter._precompute_reshaping = MagicMock( + return_value=('x_fit', 'x_new', 'y_new', 'weights', 'dims') + ) fitter._fit_function_wrapper = MagicMock(return_value='wrapped_fit_function') fitter._post_compute_reshaping = MagicMock(return_value='fit_result') fitter._minimizer = MagicMock() diff --git a/tests/unit/fitting/test_multi_fitter.py b/tests/unit/fitting/test_multi_fitter.py index 6e722c24..a54a927b 100644 --- a/tests/unit/fitting/test_multi_fitter.py +++ b/tests/unit/fitting/test_multi_fitter.py @@ -60,4 +60,4 @@ def test_fit_progress_callback(self, multi_fitter: MultiFitter): tolerance=None, max_evaluations=None, progress_callback=progress_callback, - ) \ No newline at end of file + ) From 2766be88fdfaf6fd723434bf15a923a31d5007bd Mon Sep 17 00:00:00 2001 From: rozyczko Date: Tue, 14 Apr 2026 13:06:17 +0200 Subject: [PATCH 03/11] updated BUMPS --- .../fitting/minimizers/minimizer_base.py | 4 + .../fitting/minimizers/minimizer_bumps.py | 153 ++++++-- .../fitting/minimizers/minimizer_lmfit.py | 4 - .../minimizers/test_minimizer_bumps.py | 348 +++++++++++++++++- 4 files changed, 457 insertions(+), 52 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index 4182248d..67e12abd 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -58,6 +58,10 @@ def enum(self) -> AvailableMinimizers: def name(self) -> str: return self._minimizer_enum.name + def _restore_parameter_values(self) -> None: + for key in self._cached_pars.keys(): + self._cached_pars[key].value = self._cached_pars_vals[key][0] + @abstractmethod def fit( self, diff --git 
a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index d3ffa7e3..858492ba 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -8,7 +8,10 @@ import numpy as np from bumps.fitters import FIT_AVAILABLE_IDS -from bumps.fitters import fit as bumps_fit +from bumps.fitters import FITTERS +from bumps.fitters import FitDriver +from bumps.mapper import SerialMapper +from bumps.monitor import Monitor from bumps.names import Curve from bumps.names import FitProblem from bumps.parameter import Parameter as BumpsParameter @@ -20,6 +23,7 @@ from ..available_minimizers import AvailableMinimizers from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase +from .utils import FitCancelled from .utils import FitError from .utils import FitResults @@ -28,6 +32,28 @@ FIT_AVAILABLE_IDS_FILTERED.remove('pt') +class _BumpsProgressMonitor(Monitor): + def __init__(self, owner, problem, callback): + self._owner = owner + self._problem = problem + self._callback = callback + self.cancel_requested = False + + def config_history(self, history): + history.requires(step=1, point=1, value=1) + + def __call__(self, history): + payload = self._owner._build_progress_payload( + problem=self._problem, + iteration=int(history.step[0]), + point=np.asarray(history.point[0]), + nllf=float(history.value[0]), + ) + should_continue = self._callback(payload) + if should_continue is False: + self.cancel_requested = True + + class Bumps(MinimizerBase): """ This is a wrapper to Bumps: https://bumps.readthedocs.io/ @@ -80,7 +106,7 @@ def fit( engine_kwargs: Optional[dict] = None, **kwargs, ) -> FitResults: - """Perform a fit using the lmfit engine. + """Perform a fit using the BUMPS engine. :param x: points to be calculated at :type x: np.ndarray @@ -89,17 +115,14 @@ def fit( :param weights: Weights for supplied measured points :type weights: np.ndarray :param model: Optional Model which is being fitted to - :type model: lmModel :param parameters: Optional parameters for the fit :type parameters: List[BumpsParameter] - :param kwargs: Additional arguments for the fitting function. :param method: Method for minimization :type method: str + :param progress_callback: Optional callback for progress updates + :type progress_callback: Callable :return: Fit results - :rtype: ModelResult For standard least squares, the weights - should be 1/sigma, where sigma is the standard deviation of - the measurement. For unweighted least squares, these should - be 1. 
+ :rtype: FitResults """ method_dict = self._get_method_kwargs(method) @@ -125,10 +148,8 @@ def fit( minimizer_kwargs.update(engine_kwargs) if tolerance is not None: - minimizer_kwargs['ftol'] = tolerance # tolerance for change in function value - minimizer_kwargs['xtol'] = ( - tolerance # tolerance for change in parameter value, could be an independent value - ) + minimizer_kwargs['ftol'] = tolerance + minimizer_kwargs['xtol'] = tolerance if max_evaluations is not None: minimizer_kwargs['steps'] = max_evaluations @@ -140,6 +161,29 @@ def fit( self._p_0 = {f'p{key}': self._cached_pars[key].value for key in self._cached_pars.keys()} problem = FitProblem(model) + + method_str = method_dict.get('method', self._method) + fitclass = self._resolve_fitclass(method_str) + + monitors = [] + progress_monitor = None + if progress_callback is not None: + progress_monitor = _BumpsProgressMonitor(self, problem, progress_callback) + monitors.append(progress_monitor) + + abort_test = (lambda: progress_monitor.cancel_requested) if progress_monitor else None + + mapper = SerialMapper.start_mapper(problem, []) + driver = FitDriver( + fitclass=fitclass, + problem=problem, + monitors=monitors, + abort_test=abort_test, + mapper=mapper, + **minimizer_kwargs, + ) + driver.clip() + # Why do we do this? Because a fitting template has to have global_object instantiated outside pre-runtime from easyscience import global_object @@ -147,15 +191,56 @@ def fit( global_object.stack.enabled = False try: - model_results = bumps_fit(problem, **method_dict, **minimizer_kwargs, **kwargs) - self._set_parameter_fit_result(model_results, stack_status, problem._parameters) - results = self._gen_fit_results(model_results) + x_result, fx = driver.fit() + if progress_monitor is not None and progress_monitor.cancel_requested: + raise FitCancelled() + self._set_parameter_fit_result(x_result, driver, stack_status, problem._parameters) + results = self._gen_fit_results(x_result, fx, driver) + except FitCancelled: + self._restore_parameter_values() + raise except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) return results + @staticmethod + def _resolve_fitclass(method: str): + for fitclass in FITTERS: + if fitclass.id == method: + return fitclass + raise FitError(f'Unknown BUMPS fitting method: {method}') + + def _build_progress_payload( + self, problem, iteration: int, point: np.ndarray, nllf: float + ) -> dict: + # Use the nllf already computed by the fitter to avoid a costly + # model re-evaluation. For Gaussian likelihoods: + # nllf = sum(residuals**2) / 2 => chi2 = 2 * nllf + chi2 = 2.0 * nllf + dof = problem.dof + reduced_chi2 = chi2 / dof if dof > 0 else chi2 + + parameter_values = self._current_parameter_snapshot(problem, point) + + return { + 'iteration': iteration, + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': parameter_values, + 'refresh_plots': False, + 'finished': False, + } + + def _current_parameter_snapshot(self, problem, point: np.ndarray) -> dict: + labels = problem.labels() + values = problem.getp() if point is None else point + snapshot = {} + for label, value in zip(labels, values): + dict_name = label[len(MINIMIZER_PARAMETER_PREFIX) :] + snapshot[dict_name] = float(value) + return snapshot + def convert_to_pars_obj(self, par_list: Optional[List] = None) -> List[BumpsParameter]: """Create a container with the `Parameters` converted from the base object. 
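To make the nllf-to-chi2 conversion used by `_build_progress_payload` concrete (the same numbers reappear in the Bumps unit tests further down): for a Gaussian likelihood Bumps reports nllf = sum(residuals**2) / 2, so chi2 can be recovered without re-evaluating the model. A small illustrative check, not part of the patch:

import numpy as np

residuals = np.array([3.0, 4.0])          # example residual vector
nllf = np.square(residuals).sum() / 2.0   # what the fitter hands to the monitor -> 12.5
chi2 = 2.0 * nllf                         # 25.0, same convention as the LMFit payload
dof = 2                                   # points minus varied parameters (example value)
reduced_chi2 = chi2 / dof                 # 12.5, as asserted in test_build_progress_payload
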
@@ -223,18 +308,24 @@ def _make_func(x, y, weights): return _outer(self) def _set_parameter_fit_result( - self, fit_result, stack_status: bool, par_list: List[BumpsParameter] + self, + x_result: np.ndarray, + driver: FitDriver, + stack_status: bool, + par_list: List[BumpsParameter], ): """Update parameters to their final values and assign a std error to them. - :param fit_result: Fit object which contains info on the fit - :return: None - :rtype: noneType + :param x_result: Optimized parameter values from FitDriver + :param driver: The FitDriver instance (provides stderr) + :param stack_status: Whether the undo stack was enabled + :param par_list: List of BUMPS parameter objects """ from easyscience import global_object pars = self._cached_pars + stderr = driver.stderr() if stack_status: for name in pars.keys(): @@ -245,15 +336,19 @@ def _set_parameter_fit_result( for index, name in enumerate([par.name for par in par_list]): dict_name = name[len(MINIMIZER_PARAMETER_PREFIX) :] - pars[dict_name].value = fit_result.x[index] - pars[dict_name].error = fit_result.dx[index] + pars[dict_name].value = x_result[index] + pars[dict_name].error = stderr[index] if stack_status: global_object.stack.endMacro() - def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: + def _gen_fit_results( + self, x_result: np.ndarray, fx: float, driver: FitDriver, **kwargs + ) -> FitResults: """Convert fit results into the unified `FitResults` format. - :param fit_result: Fit object which contains info on the fit + :param x_result: Optimized parameter values from FitDriver + :param fx: Final objective function value + :param driver: The FitDriver instance :return: fit results container :rtype: FitResults """ @@ -262,12 +357,11 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: for name, value in kwargs.items(): if getattr(results, name, False): setattr(results, name, value) - results.success = fit_results.success + results.success = True pars = self._cached_pars item = {} for index, name in enumerate(self._cached_model.pars.keys()): dict_name = name[len(MINIMIZER_PARAMETER_PREFIX) :] - item[name] = pars[dict_name].value results.p0 = self._p_0 @@ -276,10 +370,7 @@ def _gen_fit_results(self, fit_results, **kwargs) -> FitResults: results.y_obs = self._cached_model.y results.y_calc = self.evaluate(results.x, minimizer_parameters=results.p) results.y_err = self._cached_model.dy - # results.residual = results.y_obs - results.y_calc - # results.goodness_of_fit = np.sum(results.residual**2) results.minimizer_engine = self.__class__ results.fit_args = None - results.engine_result = fit_results - # results.check_sanity() + results.engine_result = driver return results diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 28938323..1a5d379a 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -184,10 +184,6 @@ def iter_cb(params, iteration: int, residuals: np.ndarray, *args, **kwargs) -> b return iter_cb - def _restore_parameter_values(self) -> None: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] - def _build_progress_payload(self, params, iteration: int, residuals: np.ndarray) -> dict: residual_array = np.asarray(residuals) chi2 = float(np.square(residual_array).sum()) diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 
ba86b4d0..fd393144 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -1,13 +1,17 @@ # SPDX-FileCopyrightText: 2026 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause +from unittest.mock import ANY from unittest.mock import MagicMock +from unittest.mock import patch import numpy as np import pytest import easyscience.fitting.minimizers.minimizer_bumps from easyscience.fitting.minimizers.minimizer_bumps import Bumps +from easyscience.fitting.minimizers.minimizer_bumps import _BumpsProgressMonitor +from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -47,9 +51,20 @@ def test_fit(self, minimizer: Bumps, monkeypatch) -> None: global_object.stack.enabled = False - mock_bumps_fit = MagicMock(return_value='fit') + # Mock FitDriver + mock_driver_instance = MagicMock() + mock_driver_instance.fit.return_value = (np.array([42.0]), 0.5) + mock_driver_instance.stderr.return_value = np.array([0.1]) + mock_driver_instance.clip = MagicMock() + mock_FitDriver = MagicMock(return_value=mock_driver_instance) monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'bumps_fit', mock_bumps_fit + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_SerialMapper = MagicMock() + mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper ) # Prepare a mock parameter with .name = 'pmock_parm_1' @@ -72,24 +87,29 @@ def test_fit(self, minimizer: Bumps, monkeypatch) -> None: cached_par.value = 1 cached_pars = {'mock_parm_1': cached_par} minimizer._cached_pars = cached_pars + minimizer._cached_pars_vals = {'mock_parm_1': (1, 0.0)} - # Patch _set_parameter_fit_result to a real function that will not raise KeyError - def fake_set_parameter_fit_result(fit_result, stack_status, par_list): - # Simulate what the real function does: update _cached_pars + # Patch _set_parameter_fit_result + def fake_set_parameter_fit_result(x_result, driver, stack_status, par_list): for index, name in enumerate([par.name for par in par_list]): - dict_name = name[len('p') :] # Remove prefix 'p' - minimizer._cached_pars[dict_name].value = 42 # Arbitrary value + dict_name = name[len('p') :] + minimizer._cached_pars[dict_name].value = x_result[index] minimizer._set_parameter_fit_result = fake_set_parameter_fit_result + mock_fitclass = MagicMock() + mock_fitclass.id = 'amoeba' + minimizer._resolve_fitclass = MagicMock(return_value=mock_fitclass) + # Then result = minimizer.fit(x=1.0, y=2.0, weights=1) # Expect assert result == 'gen_fit_results' - mock_bumps_fit.assert_called_once_with(mock_FitProblem_instance, method='amoeba') + mock_FitDriver.assert_called_once() + mock_driver_instance.clip.assert_called_once() + mock_driver_instance.fit.assert_called_once() minimizer._make_model.assert_called_once_with(parameters=None) - minimizer._gen_fit_results.assert_called_once_with('fit') mock_model_function.assert_called_once_with(1.0, 2.0, 1) mock_FitProblem.assert_called_once_with(mock_model) @@ -148,9 +168,9 @@ def test_set_parameter_fit_result_no_stack_status(self, minimizer: Bumps): mock_cached_model.pars = {'pa': 0, 'pb': 0} minimizer._cached_model = mock_cached_model - mock_fit_result = MagicMock() - mock_fit_result.x = [1.0, 2.0] - mock_fit_result.dx = [0.1, 0.2] + x_result = np.array([1.0, 2.0]) + mock_driver = MagicMock() + 
mock_driver.stderr.return_value = np.array([0.1, 0.2]) # The new argument: par_list (list of mock parameters) mock_par_a = MagicMock() @@ -160,7 +180,7 @@ def test_set_parameter_fit_result_no_stack_status(self, minimizer: Bumps): par_list = [mock_par_a, mock_par_b] # Then - minimizer._set_parameter_fit_result(mock_fit_result, False, par_list) + minimizer._set_parameter_fit_result(x_result, mock_driver, False, par_list) # Expect assert minimizer._cached_pars['a'].value == 1.0 @@ -176,8 +196,9 @@ def test_gen_fit_results(self, minimizer: Bumps, monkeypatch): easyscience.fitting.minimizers.minimizer_bumps, 'FitResults', mock_FitResults ) - mock_fit_result = MagicMock() - mock_fit_result.success = True + x_result = np.array([1.0, 2.0]) + fx = 0.5 + mock_driver = MagicMock() mock_cached_model = MagicMock() mock_cached_model.x = 'x' @@ -197,13 +218,13 @@ def test_gen_fit_results(self, minimizer: Bumps, monkeypatch): # Then domain_fit_results = minimizer._gen_fit_results( - mock_fit_result, **{'kwargs_set_key': 'kwargs_set_val'} + x_result, fx, mock_driver, **{'kwargs_set_key': 'kwargs_set_val'} ) # Expect assert domain_fit_results == mock_domain_fit_results assert domain_fit_results.kwargs_set_key == 'kwargs_set_val' - assert domain_fit_results.success == True + assert domain_fit_results.success is True assert domain_fit_results.y_obs == 'y' assert domain_fit_results.x == 'x' assert domain_fit_results.p == {'ppar_1': 'par_value_1', 'ppar_2': 'par_value_2'} @@ -215,6 +236,299 @@ def test_gen_fit_results(self, minimizer: Bumps, monkeypatch): == "" ) assert domain_fit_results.fit_args is None + assert domain_fit_results.engine_result == mock_driver minimizer.evaluate.assert_called_once_with( 'x', minimizer_parameters={'ppar_1': 'par_value_1', 'ppar_2': 'par_value_2'} ) + + def test_resolve_fitclass_valid(self, minimizer: Bumps) -> None: + # When Then + fitclass = Bumps._resolve_fitclass('lm') + + # Expect + assert fitclass.id == 'lm' + + def test_resolve_fitclass_invalid(self, minimizer: Bumps) -> None: + # When Then Expect + with pytest.raises(FitError): + Bumps._resolve_fitclass('nonexistent_method') + + def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + progress_callback = MagicMock(return_value=True) + + mock_driver_instance = MagicMock() + mock_driver_instance.fit.return_value = (np.array([42.0]), 0.5) + mock_driver_instance.stderr.return_value = np.array([0.1]) + mock_driver_instance.clip = MagicMock() + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_SerialMapper = MagicMock() + mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper + ) + + mock_bumps_param = MagicMock() + mock_bumps_param.name = 'pmock_parm_1' + mock_FitProblem_instance = MagicMock() + mock_FitProblem_instance._parameters = [mock_bumps_param] + mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem + ) + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = 
MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + minimizer._cached_pars_vals = {'mock_parm_1': (1, 0.0)} + + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + # Then + result = minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=progress_callback) + + # Expect - FitDriver was called with a monitor list containing our monitor + assert result == 'gen_fit_results' + driver_call_kwargs = mock_FitDriver.call_args + monitors = driver_call_kwargs.kwargs.get('monitors', driver_call_kwargs[1].get('monitors')) + assert len(monitors) == 1 + assert isinstance(monitors[0], _BumpsProgressMonitor) + + def test_fit_cancellation_restores_parameter_values( + self, minimizer: Bumps, monkeypatch + ) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + from easyscience.variable import Parameter + + parameter = MagicMock(Parameter) + parameter.value = 10.0 + minimizer._cached_pars = {'alpha': parameter} + minimizer._cached_pars_vals = {'alpha': (1.0, None)} + + mock_driver_instance = MagicMock() + + def fit_side_effect(*args, **kwargs): + # Simulate the monitor triggering cancellation + raise FitCancelled() + + mock_driver_instance.fit.side_effect = fit_side_effect + mock_driver_instance.clip = MagicMock() + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_SerialMapper = MagicMock() + mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper + ) + + mock_FitProblem_instance = MagicMock() + mock_FitProblem_instance._parameters = [] + mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem + ) + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + # Then Expect + with pytest.raises(FitCancelled): + minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=MagicMock(return_value=False)) + + assert parameter.value == 1.0 + + def test_build_progress_payload(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.dof = 2 + mock_problem.labels.return_value = ['palpha', 'pbeta'] + mock_problem.getp.return_value = np.array([1.0, 2.0]) + + point = np.array([1.0, 2.0]) + nllf = 12.5 # chi2 = 2 * 12.5 = 25.0; reduced = 25.0 / 2 = 12.5 + + # Then + payload = minimizer._build_progress_payload(mock_problem, 7, point, nllf) + + # Expect + assert payload == { + 'iteration': 7, + 'chi2': 25.0, + 'reduced_chi2': 12.5, + 'parameter_values': {'alpha': 1.0, 'beta': 2.0}, + 'refresh_plots': False, + 'finished': False, + } + # setp should NOT be called – the monitor avoids model re-evaluation + mock_problem.setp.assert_not_called() + + def test_build_progress_payload_keys_match_lmfit(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.dof = 2 + mock_problem.labels.return_value = ['pa'] + mock_problem.getp.return_value = np.array([5.0]) + + minimizer._cached_pars = {'a': MagicMock(value=5.0)} + + # Then + payload = 
minimizer._build_progress_payload(mock_problem, 1, np.array([5.0]), nllf=5.0) + + # Expect - same keys as LMFit payload + expected_keys = { + 'iteration', + 'chi2', + 'reduced_chi2', + 'parameter_values', + 'refresh_plots', + 'finished', + } + assert set(payload.keys()) == expected_keys + assert isinstance(payload['iteration'], int) + assert isinstance(payload['chi2'], float) + assert isinstance(payload['reduced_chi2'], float) + assert isinstance(payload['parameter_values'], dict) + assert payload['refresh_plots'] is False + assert payload['finished'] is False + + def test_build_progress_payload_reduced_chi2_positive_dof(self, minimizer: Bumps) -> None: + # When - dof = 2, nllf = 5.0 -> chi2 = 10.0 -> reduced = 5.0 + mock_problem = MagicMock() + mock_problem.dof = 2 + mock_problem.labels.return_value = ['pa'] + mock_problem.getp.return_value = np.array([5.0]) + + minimizer._cached_pars = {'a': MagicMock(value=5.0)} + + # Then + payload = minimizer._build_progress_payload(mock_problem, 1, np.array([5.0]), nllf=5.0) + + # Expect + assert payload['chi2'] == 10.0 # 2 * nllf + assert payload['reduced_chi2'] == 5.0 # 10.0 / 2 + + def test_current_parameter_snapshot(self, minimizer: Bumps) -> None: + # When + mock_problem = MagicMock() + mock_problem.labels.return_value = ['palpha', 'pbeta'] + + point = np.array([1.5, 2.5]) + + # Then + snapshot = minimizer._current_parameter_snapshot(mock_problem, point) + + # Expect + assert snapshot == {'alpha': 1.5, 'beta': 2.5} + + def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: + # When + callback = MagicMock(return_value=True) + mock_problem = MagicMock() + + monitor = _BumpsProgressMonitor(minimizer, mock_problem, callback) + minimizer._build_progress_payload = MagicMock(return_value={'iteration': 1}) + + mock_history = MagicMock() + mock_history.step = [5] + mock_history.point = [np.array([1.0])] + mock_history.value = [42.0] + + # Then + monitor(mock_history) + + # Expect + assert not monitor.cancel_requested + callback.assert_called_once_with({'iteration': 1}) + minimizer._build_progress_payload.assert_called_once_with( + problem=mock_problem, + iteration=5, + point=ANY, + nllf=42.0, + ) + + def test_bumps_progress_monitor_cancel(self, minimizer: Bumps) -> None: + # When + callback = MagicMock(return_value=False) + mock_problem = MagicMock() + + monitor = _BumpsProgressMonitor(minimizer, mock_problem, callback) + minimizer._build_progress_payload = MagicMock(return_value={'iteration': 1}) + + mock_history = MagicMock() + mock_history.step = [5] + mock_history.point = [np.array([1.0])] + mock_history.value = [42.0] + + # Then + monitor(mock_history) + + # Expect + assert monitor.cancel_requested is True + + def test_fit_exception_restores_values(self, minimizer: Bumps, monkeypatch) -> None: + # When + from easyscience import global_object + + global_object.stack.enabled = False + + from easyscience.variable import Parameter + + parameter = MagicMock(Parameter) + parameter.value = 10.0 + minimizer._cached_pars = {'alpha': parameter} + minimizer._cached_pars_vals = {'alpha': (1.0, None)} + + mock_driver_instance = MagicMock() + mock_driver_instance.fit.side_effect = RuntimeError('something broke') + mock_driver_instance.clip = MagicMock() + mock_FitDriver = MagicMock(return_value=mock_driver_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver + ) + + mock_SerialMapper = MagicMock() + mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') + 
monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper + ) + + mock_FitProblem_instance = MagicMock() + mock_FitProblem_instance._parameters = [] + mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem + ) + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) + + # Then Expect + with pytest.raises(FitError): + minimizer.fit(x=1.0, y=2.0, weights=1) + + assert parameter.value == 1.0 From 92036d44d5885e6ac5a7ad85355bb55469f0ee70 Mon Sep 17 00:00:00 2001 From: rozyczko Date: Wed, 15 Apr 2026 14:24:07 +0200 Subject: [PATCH 04/11] added DFO changes, streamlined implementation --- .../fitting/minimizers/minimizer_bumps.py | 38 +- .../fitting/minimizers/minimizer_dfo.py | 168 ++++++++- .../fitting/minimizers/minimizer_lmfit.py | 2 + src/easyscience/fitting/minimizers/utils.py | 39 ++- src/easyscience/fitting/multi_fitter.py | 2 + tests/integration/fitting/test_fitter.py | 56 ++- .../integration/fitting/test_multi_fitter.py | 46 ++- .../minimizers/test_minimizer_bumps.py | 6 +- .../fitting/minimizers/test_minimizer_dfo.py | 326 +++++++++++++++++- 9 files changed, 650 insertions(+), 33 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 858492ba..756182f9 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -32,6 +32,19 @@ FIT_AVAILABLE_IDS_FILTERED.remove('pt') +class _StepCounterMonitor(Monitor): + """Lightweight monitor that ensures step count is recorded in history.""" + + def __init__(self): + self.last_step = 0 + + def config_history(self, history): + history.requires(step=1) + + def __call__(self, history): + self.last_step = int(history.step[0]) + + class _BumpsProgressMonitor(Monitor): def __init__(self, owner, problem, callback): self._owner = owner @@ -165,7 +178,8 @@ def fit( method_str = method_dict.get('method', self._method) fitclass = self._resolve_fitclass(method_str) - monitors = [] + step_counter = _StepCounterMonitor() + monitors = [step_counter] progress_monitor = None if progress_callback is not None: progress_monitor = _BumpsProgressMonitor(self, problem, progress_callback) @@ -195,7 +209,9 @@ def fit( if progress_monitor is not None and progress_monitor.cancel_requested: raise FitCancelled() self._set_parameter_fit_result(x_result, driver, stack_status, problem._parameters) - results = self._gen_fit_results(x_result, fx, driver) + results = self._gen_fit_results( + x_result, fx, driver, step_counter.last_step, max_evaluations + ) except FitCancelled: self._restore_parameter_values() raise @@ -342,13 +358,21 @@ def _set_parameter_fit_result( global_object.stack.endMacro() def _gen_fit_results( - self, x_result: np.ndarray, fx: float, driver: FitDriver, **kwargs + self, + x_result: np.ndarray, + fx: float, + driver: FitDriver, + n_evaluations: int = 0, + max_evaluations: Optional[int] = None, + **kwargs, ) -> FitResults: """Convert fit results into the unified `FitResults` format. 
:param x_result: Optimized parameter values from FitDriver :param fx: Final objective function value :param driver: The FitDriver instance + :param n_evaluations: Number of iterations completed + :param max_evaluations: Maximum evaluations budget (if set) :return: fit results container :rtype: FitResults """ @@ -357,7 +381,13 @@ def _gen_fit_results( for name, value in kwargs.items(): if getattr(results, name, False): setattr(results, name, value) - results.success = True + results.n_evaluations = n_evaluations + if max_evaluations is not None and n_evaluations >= max_evaluations - 1: + results.success = False + results.message = f'Maximum number of evaluations ({max_evaluations}) reached' + else: + results.success = True + results.message = 'Optimization terminated successfully' pars = self._cached_pars item = {} for index, name in enumerate(self._cached_model.pars.keys()): diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 696d8db9..2a778465 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -1,6 +1,7 @@ # SPDX-FileCopyrightText: 2026 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause +from dataclasses import dataclass from typing import Callable from typing import Dict from typing import List @@ -20,6 +21,21 @@ from .utils import FitResults +@dataclass(frozen=True) +class DFOCallbackState: + """Snapshot of a DFO objective evaluation.""" + + evaluation: int + xk: np.ndarray + residuals: np.ndarray + objective: float + parameters: dict[str, float] + best_xk: np.ndarray + best_objective: float + best_parameters: dict[str, float] + improved: bool + + class DFO(MinimizerBase): """ This is a wrapper to Derivative Free Optimisation for Least Square: https://numericalalgorithmsgroup.github.io/dfols/ @@ -65,6 +81,9 @@ def fit( tolerance: Optional[float] = None, max_evaluations: Optional[int] = None, progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, + callback: Optional[Callable[[DFOCallbackState], None]] = None, + callback_every: int = 1, + callback_on_improvement_only: bool = False, **kwargs, ) -> FitResults: """Perform a fit using the DFO-ls engine. 
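For orientation, the `DFOCallbackState` dataclass introduced above is what a user-supplied `callback` receives on each (or every `callback_every`-th) objective evaluation. A minimal sketch of such a callback with purely illustrative logging; `minimizer`, `x` and `y` below are placeholders:

from easyscience.fitting.minimizers.minimizer_dfo import DFOCallbackState

def log_dfo_progress(state: DFOCallbackState) -> None:
    # parameter names in state.parameters carry the minimizer 'p' prefix
    marker = '*' if state.improved else ' '
    print(f"{marker} eval {state.evaluation}: objective={state.objective:.4g} "
          f"best={state.best_objective:.4g}")

# usage (illustrative):
# minimizer.fit(x, y, callback=log_dfo_progress, callback_every=5)
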
@@ -102,9 +121,32 @@ def fit( if (weights <= 0).any(): raise ValueError('Weights must be strictly positive and non-zero.') + if callback_every < 1: + raise ValueError('callback_every must be a positive integer.') + + # Bridge progress_callback into the DFO callback mechanism + if progress_callback is not None and callback is None: + dof = max(len(x) - len(self._cached_pars), 1) + callback = self._make_progress_adapter(progress_callback, dof) + if callback_every == 1: + callback_every = 1 # keep default; adapter fires every evaluation + if model is None: - model_function = self._make_model(parameters=parameters) + model_function = self._make_model( + parameters=parameters, + callback=callback, + callback_every=callback_every, + callback_on_improvement_only=callback_on_improvement_only, + ) model = model_function(x, y, weights) + elif callback is not None: + model = self._wrap_model_with_callback( + model, + self._get_callback_parameter_names(parameters), + callback, + callback_every, + callback_on_improvement_only, + ) self._cached_model = model self._cached_model.x = x self._cached_model.y = y @@ -123,6 +165,10 @@ def fit( model_results = self._dfo_fit(self._cached_pars, model, **kwargs) self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results, weights) + except FitError: + for key in self._cached_pars.keys(): + self._cached_pars[key].value = self._cached_pars_vals[key][0] + raise except Exception as e: for key in self._cached_pars.keys(): self._cached_pars[key].value = self._cached_pars_vals[key][0] @@ -138,7 +184,13 @@ def convert_to_par_object(obj) -> None: """Required by interface but not needed for DFO-LS.""" pass - def _make_model(self, parameters: Optional[List[Parameter]] = None) -> Callable: + def _make_model( + self, + parameters: Optional[List[Parameter]] = None, + callback: Optional[Callable[[DFOCallbackState], None]] = None, + callback_every: int = 1, + callback_on_improvement_only: bool = False, + ) -> Callable: """Generate a model from the supplied `fit_function` and parameters in the base object. 
Note that this makes a callable as it needs to be initialized with *x*, *y*, *weights* @@ -163,12 +215,111 @@ def _residuals(pars_values: List[float]) -> np.ndarray: dfo_pars[par_name] = pars_values[idx] return (y - fit_func(x, **dfo_pars)) * weights - return _residuals + return obj._wrap_model_with_callback( + _residuals, + list(dfo_pars.keys()), + callback, + callback_every, + callback_on_improvement_only, + ) return _make_func return _outer(self) + def _get_callback_parameter_names( + self, parameters: Optional[List[Parameter]] = None + ) -> list[str]: + if parameters is not None: + return [MINIMIZER_PARAMETER_PREFIX + parameter.unique_name for parameter in parameters] + return [MINIMIZER_PARAMETER_PREFIX + name for name in self._cached_pars.keys()] + + @staticmethod + def _wrap_model_with_callback( + model: Callable, + parameter_names: list[str], + callback: Optional[Callable[[DFOCallbackState], None]], + callback_every: int, + callback_on_improvement_only: bool, + ) -> Callable: + if callback is None: + return model + + evaluation = 0 + best_objective = np.inf + best_xk = np.array([], dtype=float) + best_parameters: dict[str, float] = {} + + def wrapped_model(pars_values: List[float]) -> np.ndarray: + nonlocal evaluation, best_objective, best_xk, best_parameters + + residuals = np.asarray(model(pars_values), dtype=float) + xk = np.asarray(pars_values, dtype=float).copy() + parameters = {name: value for name, value in zip(parameter_names, xk)} + objective = float(np.dot(residuals.ravel(), residuals.ravel())) + + evaluation += 1 + improved = objective < best_objective + if improved: + best_objective = objective + best_xk = xk.copy() + best_parameters = parameters.copy() + + should_notify = evaluation % callback_every == 0 + if callback_on_improvement_only: + should_notify = should_notify and improved + + if should_notify: + callback( + DFOCallbackState( + evaluation=evaluation, + xk=xk, + residuals=residuals.copy(), + objective=objective, + parameters=parameters, + best_xk=best_xk.copy(), + best_objective=best_objective, + best_parameters=best_parameters.copy(), + improved=improved, + ) + ) + + return residuals + + return wrapped_model + + @staticmethod + def _make_progress_adapter( + progress_callback: Callable[[dict], Optional[bool]], + dof: int, + ) -> Callable[['DFOCallbackState'], None]: + """Create a DFO callback that translates DFOCallbackState into + the standard progress_callback dict format used by the GUI. + + :param progress_callback: Standard progress callback (dict -> bool|None) + :param dof: Degrees of freedom for reduced chi2 calculation + :return: DFO-compatible callback + """ + + def adapter(state: 'DFOCallbackState') -> None: + chi2 = state.best_objective + reduced_chi2 = chi2 / dof if dof > 0 else chi2 + param_snapshot = { + name[len(MINIMIZER_PARAMETER_PREFIX) :]: float(val) + for name, val in state.best_parameters.items() + } + payload = { + 'iteration': state.evaluation, + 'chi2': chi2, + 'reduced_chi2': reduced_chi2, + 'parameter_values': param_snapshot, + 'refresh_plots': False, + 'finished': False, + } + progress_callback(payload) + + return adapter + def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) -> None: """Update parameters to their final values and assign a std error to them. 
@@ -209,7 +360,7 @@ def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: for name, value in kwargs.items(): if getattr(results, name, False): setattr(results, name, value) - results.success = not bool(fit_results.flag) + results.success = fit_results.flag == fit_results.EXIT_SUCCESS pars = {} for p_name, par in self._cached_pars.items(): @@ -221,11 +372,14 @@ def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: results.y_obs = self._cached_model.y results.y_calc = self.evaluate(results.x, minimizer_parameters=results.p) results.y_err = weights + results.n_evaluations = int(fit_results.nf) + results.message = str(fit_results.msg) # results.residual = results.y_obs - results.y_calc # results.goodness_of_fit = fit_results.f results.minimizer_engine = self.__class__ results.fit_args = None + results.engine_result = fit_results # results.check_sanity() return results @@ -259,10 +413,10 @@ def _dfo_fit( results = dfols.solve(model, pars_values, bounds=bounds, **kwargs) - if 'Success' not in results.msg: - raise FitError(f'Fit failed with message: {results.msg}') + if results.flag in {results.EXIT_SUCCESS, results.EXIT_MAXFUN_WARNING}: + return results - return results + raise FitError(f'Fit failed with message: {results.msg}') @staticmethod def _prepare_kwargs( diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 1a5d379a..adb0bf54 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -346,6 +346,8 @@ def _gen_fit_results(self, fit_results: ModelResult, **kwargs) -> FitResults: # results.goodness_of_fit = fit_results.chisqr results.y_calc = fit_results.best_fit results.y_err = 1 / fit_results.weights + results.n_evaluations = fit_results.nfev + results.message = fit_results.message results.minimizer_engine = self.__class__ results.fit_args = None diff --git a/src/easyscience/fitting/minimizers/utils.py b/src/easyscience/fitting/minimizers/utils.py index 23226706..da1ab517 100644 --- a/src/easyscience/fitting/minimizers/utils.py +++ b/src/easyscience/fitting/minimizers/utils.py @@ -20,6 +20,8 @@ class FitResults: 'y_obs', 'y_calc', 'y_err', + 'n_evaluations', + 'message', 'engine_result', 'total_results', ] @@ -35,9 +37,44 @@ def __init__(self): self.y_obs = np.ndarray([]) self.y_calc = np.ndarray([]) self.y_err = np.ndarray([]) + self.n_evaluations = None + self.message = '' self.engine_result = None self.total_results = None + def __repr__(self) -> str: + engine_name = self.minimizer_engine.__name__ if self.minimizer_engine else None + try: + chi2_val = self.chi2 + reduced_val = self.reduced_chi2 + if not np.isfinite(chi2_val) or not np.isfinite(reduced_val): + raise ValueError + chi2 = f'{chi2_val:.4g}' + reduced = f'{reduced_val:.4g}' + except Exception: + chi2 = 'N/A' + reduced = 'N/A' + + try: + n_points = len(self.x) + except TypeError: + n_points = 0 + + lines = [ + f'FitResults(success={self.success}', + f' n_pars={self.n_pars}, n_points={n_points}', + f' chi2={chi2}, reduced_chi2={reduced}', + f' n_evaluations={self.n_evaluations}', + f' minimizer={engine_name}', + ] + if self.message: + lines.append(f" message='{self.message}'") + if self.p: + par_str = ', '.join(f'{k}={v:.4g}' for k, v in self.p.items()) + lines.append(f' parameters={{{par_str}}}') + lines.append(')') + return '\n'.join(lines) + @property def n_pars(self): return len(self.p) @@ -51,7 +88,7 @@ def chi2(self): return 
((self.residual / self.y_err) ** 2).sum() @property - def reduced_chi(self): + def reduced_chi2(self): return self.chi2 / (len(self.x) - self.n_pars) diff --git a/src/easyscience/fitting/multi_fitter.py b/src/easyscience/fitting/multi_fitter.py index b296cea1..af8ab978 100644 --- a/src/easyscience/fitting/multi_fitter.py +++ b/src/easyscience/fitting/multi_fitter.py @@ -131,6 +131,8 @@ def _post_compute_reshaping( current_results.minimizer_engine = fit_result_obj.minimizer_engine current_results.p = fit_result_obj.p current_results.p0 = fit_result_obj.p0 + current_results.n_evaluations = fit_result_obj.n_evaluations + current_results.message = fit_result_obj.message current_results.x = this_x current_results.y_obs = y[idx] current_results.y_calc = np.reshape( diff --git a/tests/integration/fitting/test_fitter.py b/tests/integration/fitting/test_fitter.py index c6d130fd..63ede513 100644 --- a/tests/integration/fitting/test_fitter.py +++ b/tests/integration/fitting/test_fitter.py @@ -81,7 +81,7 @@ def __call__(self, x: np.ndarray) -> np.ndarray: def check_fit_results(result, sp_sin, ref_sin, x, **kwargs): assert result.n_pars == len(sp_sin.get_fit_parameters()) assert result.chi2 == pytest.approx(0, abs=1.5e-3 * (len(result.x) - result.n_pars)) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.success if 'sp_ref1' in kwargs.keys(): sp_ref1 = kwargs['sp_ref1'] @@ -207,14 +207,48 @@ def test_basic_max_evaluations(fit_engine): except AttributeError: pytest.skip(msg=f'{fit_engine} is not installed') f.max_evaluations = 3 - try: - result = f.fit(x=x, y=y, weights=weights) - # Result should not be the same as the reference - assert sp_sin.phase.value != pytest.approx(ref_sin.phase.value, rel=1e-3) - assert sp_sin.offset.value != pytest.approx(ref_sin.offset.value, rel=1e-3) - except FitError as e: - # DFO throws a different error - assert 'Objective has been called MAXFUN times' in str(e) + result = f.fit(x=x, y=y, weights=weights) + # Result should not be the same as the reference + assert sp_sin.phase.value != pytest.approx(ref_sin.phase.value, rel=1e-3) + assert sp_sin.offset.value != pytest.approx(ref_sin.offset.value, rel=1e-3) + + +@pytest.mark.fast +@pytest.mark.parametrize( + 'fit_engine', + [ + None, + AvailableMinimizers.LMFit, + AvailableMinimizers.Bumps, + AvailableMinimizers.DFO, + ], +) +def test_max_evaluations_populates_fit_result_fields(fit_engine): + """With a tight budget every engine must return success=False, n_evaluations>0, non-empty message.""" + ref_sin = AbsSin(0.2, np.pi) + sp_sin = AbsSin(0.354, 3.05) + + x = np.linspace(0, 5, 200) + weights = np.ones_like(x) + y = ref_sin(x) + + sp_sin.offset.fixed = False + sp_sin.phase.fixed = False + + f = Fitter(sp_sin, sp_sin) + if fit_engine is not None: + try: + f.switch_minimizer(fit_engine) + except AttributeError: + pytest.skip(msg=f'{fit_engine} is not installed') + f.max_evaluations = 3 + result = f.fit(x=x, y=y, weights=weights) + + assert result.success is False + assert result.n_evaluations is not None + assert result.n_evaluations > 0 + assert isinstance(result.message, str) + assert len(result.message) > 0 @pytest.mark.fast @@ -351,7 +385,7 @@ def test_2D_vectorized(fit_engine): else: raise e assert result.n_pars == len(m2.get_fit_parameters()) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.success assert np.all(result.x == XY) y_calc_ref = m2(XY) 
@@ -390,7 +424,7 @@ def test_2D_non_vectorized(fit_engine): else: raise e assert result.n_pars == len(m2.get_fit_parameters()) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.success assert np.all(result.x == XY) y_calc_ref = m2(XY.reshape(-1, 2)) diff --git a/tests/integration/fitting/test_multi_fitter.py b/tests/integration/fitting/test_multi_fitter.py index 1cc5b395..fe4df933 100644 --- a/tests/integration/fitting/test_multi_fitter.py +++ b/tests/integration/fitting/test_multi_fitter.py @@ -95,7 +95,7 @@ def test_multi_fit(fit_engine): sp_sin_2.get_fit_parameters() ) assert result.chi2 == pytest.approx(0, abs=1.5e-3 * (len(result.x) - result.n_pars)) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.success assert np.all(result.x == X[idx]) assert np.all(result.y_obs == Y[idx]) @@ -103,6 +103,46 @@ def test_multi_fit(fit_engine): assert result.residual == pytest.approx(F_real[idx](X[idx]) - F_ref[idx](X[idx]), abs=1e-2) +@pytest.mark.parametrize('fit_engine', [None, 'LMFit', 'Bumps', 'DFO']) +def test_multi_fit_propagates_n_evaluations_and_message(fit_engine): + """Verify that n_evaluations and message are copied into each per-dataset result.""" + ref_sin_1 = AbsSin(0.2, np.pi) + sp_sin_1 = AbsSin(0.354, 3.05) + ref_sin_2 = AbsSin(np.pi * 0.45, 0.45 * np.pi * 0.5) + sp_sin_2 = AbsSin(1, 0.5) + + ref_sin_2.offset.make_dependent_on( + dependency_expression='ref_sin1', dependency_map={'ref_sin1': ref_sin_1.offset} + ) + sp_sin_2.offset.make_dependent_on( + dependency_expression='sp_sin1', dependency_map={'sp_sin1': sp_sin_1.offset} + ) + + x1 = np.linspace(0, 5, 200) + y1 = ref_sin_1(x1) + x2 = np.copy(x1) + y2 = ref_sin_2(x2) + weights = np.ones_like(x1) + + sp_sin_1.offset.fixed = False + sp_sin_1.phase.fixed = False + sp_sin_2.phase.fixed = False + + f = MultiFitter([sp_sin_1, sp_sin_2], [sp_sin_1, sp_sin_2]) + if fit_engine is not None: + try: + f.switch_minimizer(fit_engine) + except AttributeError: + pytest.skip(msg=f'{fit_engine} is not installed') + + results = f.fit(x=[x1, x2], y=[y1, y2], weights=[weights, weights]) + for result in results: + assert result.n_evaluations is not None + assert isinstance(result.n_evaluations, int) + assert result.n_evaluations > 0 + assert isinstance(result.message, str) + + @pytest.mark.parametrize('fit_engine', [None, 'LMFit', 'Bumps', 'DFO']) def test_multi_fit2(fit_engine): ref_sin_1 = AbsSin(0.2, np.pi) @@ -160,7 +200,7 @@ def test_multi_fit2(fit_engine): sp_sin_2.get_fit_parameters() ) + len(sp_line.get_fit_parameters()) assert result.chi2 == pytest.approx(0, abs=1.5e-3 * (len(result.x) - result.n_pars)) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.success assert np.all(result.x == X[idx]) assert np.all(result.y_obs == Y[idx]) @@ -235,7 +275,7 @@ def test_multi_fit_1D_2D(fit_engine): fit_engine != 'DFO' ): # DFO apparently does not fit well with even weights. 
Can't be bothered to fix assert result.chi2 == pytest.approx(0, abs=1.5e-3 * (len(result.x) - result.n_pars)) - assert result.reduced_chi == pytest.approx(0, abs=1.5e-3) + assert result.reduced_chi2 == pytest.approx(0, abs=1.5e-3) assert result.y_calc == pytest.approx(F_ref[idx](X[idx]), abs=1e-2) assert result.residual == pytest.approx( F_real[idx](X[idx]) - F_ref[idx](X[idx]), abs=1e-2 diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index fd393144..5dbcfedf 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -11,6 +11,7 @@ import easyscience.fitting.minimizers.minimizer_bumps from easyscience.fitting.minimizers.minimizer_bumps import Bumps from easyscience.fitting.minimizers.minimizer_bumps import _BumpsProgressMonitor +from easyscience.fitting.minimizers.minimizer_bumps import _StepCounterMonitor from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -305,8 +306,9 @@ def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: assert result == 'gen_fit_results' driver_call_kwargs = mock_FitDriver.call_args monitors = driver_call_kwargs.kwargs.get('monitors', driver_call_kwargs[1].get('monitors')) - assert len(monitors) == 1 - assert isinstance(monitors[0], _BumpsProgressMonitor) + assert len(monitors) == 2 + assert isinstance(monitors[0], _StepCounterMonitor) + assert isinstance(monitors[1], _BumpsProgressMonitor) def test_fit_cancellation_restores_parameter_values( self, minimizer: Bumps, monkeypatch diff --git a/tests/unit/fitting/minimizers/test_minimizer_dfo.py b/tests/unit/fitting/minimizers/test_minimizer_dfo.py index e1d5eeef..5aa711d1 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_dfo.py +++ b/tests/unit/fitting/minimizers/test_minimizer_dfo.py @@ -8,6 +8,7 @@ import easyscience.fitting.minimizers.minimizer_dfo from easyscience.fitting.minimizers.minimizer_dfo import DFO +from easyscience.fitting.minimizers.minimizer_dfo import DFOCallbackState from easyscience.fitting.minimizers.utils import FitError from easyscience.variable import Parameter @@ -66,11 +67,43 @@ def test_fit(self, minimizer: DFO) -> None: # Expect assert result == 'gen_fit_results' minimizer._dfo_fit.assert_called_once_with(cached_pars, mock_model) - minimizer._make_model.assert_called_once_with(parameters=None) + minimizer._make_model.assert_called_once_with( + parameters=None, + callback=None, + callback_every=1, + callback_on_improvement_only=False, + ) minimizer._set_parameter_fit_result.assert_called_once_with('fit', False) minimizer._gen_fit_results.assert_called_once_with('fit', 1) mock_model_function.assert_called_once_with(1.0, 2.0, 1) + def test_fit_passes_callback_to_model_builder(self, minimizer: DFO) -> None: + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + callback = MagicMock() + + minimizer.fit(x=1.0, y=2.0, weights=1, callback=callback) + + 
minimizer._make_model.assert_called_once_with( + parameters=None, + callback=callback, + callback_every=1, + callback_on_improvement_only=False, + ) + def test_generate_fit_function(self, minimizer: DFO) -> None: # When minimizer._original_fit_function = MagicMock(return_value='fit_function_result') @@ -142,6 +175,89 @@ def test_make_model(self, minimizer: DFO) -> None: 'pmock_parm_2': 2222, } + def test_make_model_callback(self, minimizer: DFO) -> None: + mock_fit_function = MagicMock(return_value=np.array([11, 22])) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + + mock_parm_1 = MagicMock() + mock_parm_1.unique_name = 'mock_parm_1' + mock_parm_1.value = 1000.0 + mock_parm_2 = MagicMock() + mock_parm_2.unique_name = 'mock_parm_2' + mock_parm_2.value = 2000.0 + + callback = MagicMock() + + model = minimizer._make_model(parameters=[mock_parm_1, mock_parm_2], callback=callback) + residuals_for_model = model( + x=np.array([1, 2]), + y=np.array([10, 20]), + weights=np.array([1 / 100, 1 / 200]), + ) + + residuals = residuals_for_model(np.array([1111, 2222])) + + assert all(np.array([-0.01, -0.01]) == residuals) + callback.assert_called_once() + state = callback.call_args[0][0] + assert isinstance(state, DFOCallbackState) + assert state.evaluation == 1 + assert state.improved == True + assert state.objective == pytest.approx(0.0002) + assert all(state.xk == np.array([1111, 2222])) + assert all(state.residuals == np.array([-0.01, -0.01])) + assert state.parameters == { + 'pmock_parm_1': 1111.0, + 'pmock_parm_2': 2222.0, + } + assert all(state.best_xk == np.array([1111, 2222])) + assert state.best_parameters == { + 'pmock_parm_1': 1111.0, + 'pmock_parm_2': 2222.0, + } + + def test_make_model_callback_every(self, minimizer: DFO) -> None: + mock_fit_function = MagicMock(return_value=np.array([11, 22])) + minimizer._generate_fit_function = MagicMock(return_value=mock_fit_function) + + mock_parm_1 = MagicMock() + mock_parm_1.unique_name = 'mock_parm_1' + mock_parm_1.value = 1000.0 + mock_parm_2 = MagicMock() + mock_parm_2.unique_name = 'mock_parm_2' + mock_parm_2.value = 2000.0 + + callback = MagicMock() + + model = minimizer._make_model( + parameters=[mock_parm_1, mock_parm_2], + callback=callback, + callback_every=2, + ) + residuals_for_model = model( + x=np.array([1, 2]), + y=np.array([10, 20]), + weights=np.array([1 / 100, 1 / 200]), + ) + + residuals_for_model(np.array([1111, 2222])) + residuals_for_model(np.array([1222, 2333])) + + callback.assert_called_once() + state = callback.call_args[0][0] + assert state.evaluation == 2 + assert all(state.xk == np.array([1222, 2333])) + + def test_fit_callback_every_must_be_positive(self, minimizer: DFO) -> None: + with pytest.raises(ValueError, match='callback_every must be a positive integer'): + minimizer.fit( + x=np.array([1.0]), + y=np.array([1.0]), + weights=np.array([1.0]), + callback=MagicMock(), + callback_every=0, + ) + def test_set_parameter_fit_result_no_stack_status(self, minimizer: DFO): # When minimizer._cached_pars = { @@ -177,7 +293,10 @@ def test_gen_fit_results(self, minimizer: DFO, monkeypatch): ) mock_fit_result = MagicMock() - mock_fit_result.flag = False + mock_fit_result.EXIT_SUCCESS = 0 + mock_fit_result.flag = 0 + mock_fit_result.nf = 12 + mock_fit_result.msg = 'Maximum function evaluations reached' mock_cached_model = MagicMock() mock_cached_model.x = 'x' @@ -214,15 +333,80 @@ def test_gen_fit_results(self, minimizer: DFO, monkeypatch): assert domain_fit_results.p0 == 'p_0' assert 
domain_fit_results.y_calc == 'evaluate'
         assert domain_fit_results.y_err == 'weights'
+        assert domain_fit_results.n_evaluations == 12
+        assert domain_fit_results.message == 'Maximum function evaluations reached'
+        assert domain_fit_results.engine_result == mock_fit_result
         assert (
             str(domain_fit_results.minimizer_engine)
             == "<class 'easyscience.fitting.minimizers.minimizer_dfo.DFO'>"
         )
-        assert domain_fit_results.fit_args is None
-        minimizer.evaluate.assert_called_once_with(
-            'x', minimizer_parameters={'ppar_1': 'par_value_1', 'ppar_2': 'par_value_2'}
+
+    def test_gen_fit_results_maxfun_warning_sets_success_false(self, minimizer: DFO, monkeypatch):
+        """When DFO returns EXIT_MAXFUN_WARNING, _gen_fit_results must set success=False."""
+        mock_domain_fit_results = MagicMock()
+        mock_FitResults = MagicMock(return_value=mock_domain_fit_results)
+        monkeypatch.setattr(
+            easyscience.fitting.minimizers.minimizer_dfo, 'FitResults', mock_FitResults
         )
 
+        mock_fit_result = MagicMock()
+        mock_fit_result.EXIT_SUCCESS = 0
+        mock_fit_result.EXIT_MAXFUN_WARNING = 1
+        mock_fit_result.flag = 1  # MAXFUN_WARNING
+        mock_fit_result.nf = 50
+        mock_fit_result.msg = 'Objective has been called MAXFUN times'
+
+        mock_cached_model = MagicMock()
+        mock_cached_model.x = 'x'
+        mock_cached_model.y = 'y'
+        minimizer._cached_model = mock_cached_model
+
+        mock_cached_par_1 = MagicMock()
+        mock_cached_par_1.value = 'v1'
+        minimizer._cached_pars = {'par_1': mock_cached_par_1}
+        minimizer._p_0 = 'p_0'
+        minimizer.evaluate = MagicMock(return_value='evaluate')
+
+        domain_fit_results = minimizer._gen_fit_results(mock_fit_result, 'weights')
+
+        assert domain_fit_results.success == False
+        assert domain_fit_results.n_evaluations == 50
+        assert domain_fit_results.message == 'Objective has been called MAXFUN times'
+
+    def test_dfo_fit_allows_maxfun_warning(self, minimizer: DFO, monkeypatch) -> None:
+        mock_result = MagicMock()
+        mock_result.EXIT_SUCCESS = 0
+        mock_result.EXIT_MAXFUN_WARNING = 1
+        mock_result.flag = 1
+
+        mock_solve = MagicMock(return_value=mock_result)
+        monkeypatch.setattr(
+            easyscience.fitting.minimizers.minimizer_dfo.dfols, 'solve', mock_solve
+        )
+
+        parameter = MagicMock(min=0.0, max=1.0, value=0.5)
+
+        result = minimizer._dfo_fit({'par': parameter}, MagicMock())
+
+        assert result == mock_result
+
+    def test_dfo_fit_raises_for_non_maxfun_failure(self, minimizer: DFO, monkeypatch) -> None:
+        mock_result = MagicMock()
+        mock_result.EXIT_SUCCESS = 0
+        mock_result.EXIT_MAXFUN_WARNING = 1
+        mock_result.flag = 4
+        mock_result.msg = 'linear algebra error'
+
+        mock_solve = MagicMock(return_value=mock_result)
+        monkeypatch.setattr(
+            easyscience.fitting.minimizers.minimizer_dfo.dfols, 'solve', mock_solve
+        )
+
+        parameter = MagicMock(min=0.0, max=1.0, value=0.5)
+
+        with pytest.raises(FitError, match='linear algebra error'):
+            minimizer._dfo_fit({'par': parameter}, MagicMock())
+
     def test_dfo_fit(self, minimizer: DFO, monkeypatch):
         # When
         mock_parm_1 = MagicMock(Parameter)
@@ -239,6 +423,9 @@ def test_dfo_fit(self, minimizer: DFO, monkeypatch):
 
         mock_dfols = MagicMock()
         mock_results = MagicMock()
+        mock_results.EXIT_SUCCESS = 0
+        mock_results.EXIT_MAXFUN_WARNING = 1
+        mock_results.flag = 0
         mock_results.msg = 'Success'
 
         mock_dfols.solve = MagicMock(return_value=mock_results)
@@ -272,6 +459,9 @@ def test_dfo_fit_no_scaling(self, minimizer: DFO, monkeypatch):
 
         mock_dfols = MagicMock()
         mock_results = MagicMock()
+        mock_results.EXIT_SUCCESS = 0
+        mock_results.EXIT_MAXFUN_WARNING = 1
+        mock_results.flag = 0
         mock_results.msg = 'Success'
 
         mock_dfols.solve = 
MagicMock(return_value=mock_results) @@ -290,6 +480,33 @@ def test_dfo_fit_no_scaling(self, minimizer: DFO, monkeypatch): assert 'kwargs_set_key' in list(mock_dfols.solve.call_args[1].keys()) assert mock_dfols.solve.call_args[1]['kwargs_set_key'] == 'kwargs_set_val' + def test_fit_generic_exception_resets_parameters_and_raises_fit_error( + self, minimizer: DFO + ) -> None: + """When _dfo_fit raises a non-FitError exception, fit() must reset + parameter values to cached originals and re-raise as FitError.""" + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(side_effect=RuntimeError('solver crashed')) + + cached_par_1 = MagicMock() + cached_par_1.value = 5.0 + cached_par_2 = MagicMock() + cached_par_2.value = 10.0 + minimizer._cached_pars = {'a': cached_par_1, 'b': cached_par_2} + minimizer._cached_pars_vals = {'a': (1.0, 0.1), 'b': (2.0, 0.2)} + + with pytest.raises(FitError): + minimizer.fit(x=np.array([1.0]), y=np.array([1.0]), weights=np.array([1.0])) + + assert cached_par_1.value == 1.0 + assert cached_par_2.value == 2.0 + def test_dfo_fit_exception(self, minimizer: DFO, monkeypatch): # When pars = {1: MagicMock(Parameter)} @@ -297,6 +514,9 @@ def test_dfo_fit_exception(self, minimizer: DFO, monkeypatch): mock_dfols = MagicMock() mock_results = MagicMock() + mock_results.EXIT_SUCCESS = 0 + mock_results.EXIT_MAXFUN_WARNING = 1 + mock_results.flag = 3 mock_results.msg = 'Failed' mock_dfols.solve = MagicMock(return_value=mock_results) @@ -305,3 +525,99 @@ def test_dfo_fit_exception(self, minimizer: DFO, monkeypatch): # Then Expect with pytest.raises(FitError): minimizer._dfo_fit(pars, 'model', **kwargs) + + def test_progress_callback_creates_adapter_when_no_explicit_callback( + self, minimizer: DFO + ) -> None: + """When progress_callback is provided without an explicit callback, + fit() should auto-create a DFO callback adapter.""" + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(return_value='fit') + minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + progress_cb = MagicMock() + + minimizer.fit( + x=np.array([1.0, 2.0, 3.0]), + y=np.array([1.0, 2.0, 3.0]), + weights=np.array([1.0, 1.0, 1.0]), + progress_callback=progress_cb, + ) + + # The adapter should have been passed as callback to _make_model + call_kwargs = minimizer._make_model.call_args[1] + assert call_kwargs['callback'] is not None + assert callable(call_kwargs['callback']) + + def test_progress_callback_not_used_when_explicit_callback_given(self, minimizer: DFO) -> None: + """When both progress_callback and callback are given, the explicit + callback takes precedence and progress_callback is ignored.""" + from easyscience import global_object + + global_object.stack.enabled = False + + mock_model = MagicMock() + mock_model_function = MagicMock(return_value=mock_model) + minimizer._make_model = MagicMock(return_value=mock_model_function) + minimizer._dfo_fit = MagicMock(return_value='fit') + 
minimizer._set_parameter_fit_result = MagicMock() + minimizer._gen_fit_results = MagicMock(return_value='gen_fit_results') + + cached_par = MagicMock() + cached_par.value = 1 + minimizer._cached_pars = {'mock_parm_1': cached_par} + + progress_cb = MagicMock() + explicit_cb = MagicMock() + + minimizer.fit( + x=np.array([1.0, 2.0, 3.0]), + y=np.array([1.0, 2.0, 3.0]), + weights=np.array([1.0, 1.0, 1.0]), + progress_callback=progress_cb, + callback=explicit_cb, + ) + + call_kwargs = minimizer._make_model.call_args[1] + assert call_kwargs['callback'] is explicit_cb + + def test_make_progress_adapter_payload_format(self) -> None: + """The adapter must produce the standard progress payload dict.""" + progress_cb = MagicMock() + dof = 5 + + adapter = DFO._make_progress_adapter(progress_cb, dof) + + state = DFOCallbackState( + evaluation=10, + xk=np.array([1.0, 2.0]), + residuals=np.array([0.1, 0.2]), + objective=0.05, + parameters={'pmock_parm_1': 1.0, 'pmock_parm_2': 2.0}, + best_xk=np.array([1.0, 2.0]), + best_objective=0.04, + best_parameters={'pmock_parm_1': 1.0, 'pmock_parm_2': 2.0}, + improved=True, + ) + + adapter(state) + + progress_cb.assert_called_once() + payload = progress_cb.call_args[0][0] + assert payload['iteration'] == 10 + assert payload['chi2'] == 0.04 # best_objective + assert payload['reduced_chi2'] == pytest.approx(0.04 / 5) + assert payload['parameter_values'] == {'mock_parm_1': 1.0, 'mock_parm_2': 2.0} + assert payload['refresh_plots'] is False + assert payload['finished'] is False From c60bf5a1fe718189d7b94aa60b640c6cf4c2507a Mon Sep 17 00:00:00 2001 From: rozyczko Date: Wed, 15 Apr 2026 14:32:11 +0200 Subject: [PATCH 05/11] pixi run docs-format-fix --- src/easyscience/fitting/minimizers/minimizer_bumps.py | 4 +++- src/easyscience/fitting/minimizers/minimizer_dfo.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 756182f9..3bb2f0dd 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -33,7 +33,9 @@ class _StepCounterMonitor(Monitor): - """Lightweight monitor that ensures step count is recorded in history.""" + """Lightweight monitor that ensures step count is recorded in + history. + """ def __init__(self): self.last_step = 0 diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 2a778465..e66c5362 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -296,7 +296,8 @@ def _make_progress_adapter( """Create a DFO callback that translates DFOCallbackState into the standard progress_callback dict format used by the GUI. 
- :param progress_callback: Standard progress callback (dict -> bool|None) + :param progress_callback: Standard progress callback (dict -> + bool|None) :param dof: Degrees of freedom for reduced chi2 calculation :return: DFO-compatible callback """ From 53a80eb5479876e2c474a4feaea90a8d9a4ca0a7 Mon Sep 17 00:00:00 2001 From: Piotr Rozyczko Date: Wed, 15 Apr 2026 21:26:46 +0200 Subject: [PATCH 06/11] changes after internal CR --- .../fitting/minimizers/minimizer_bumps.py | 2 + .../fitting/minimizers/minimizer_dfo.py | 8 +--- .../minimizers/test_minimizer_bumps.py | 40 +++++++++++++++++++ 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 3bb2f0dd..3f702709 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -384,6 +384,8 @@ def _gen_fit_results( if getattr(results, name, False): setattr(results, name, value) results.n_evaluations = n_evaluations + # Bumps step counter is 0-indexed, so the last step of a budget of N + # is N-1. We therefore compare with ``max_evaluations - 1``. if max_evaluations is not None and n_evaluations >= max_evaluations - 1: results.success = False results.message = f'Maximum number of evaluations ({max_evaluations}) reached' diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index e66c5362..dbcd6f69 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -128,8 +128,6 @@ def fit( if progress_callback is not None and callback is None: dof = max(len(x) - len(self._cached_pars), 1) callback = self._make_progress_adapter(progress_callback, dof) - if callback_every == 1: - callback_every = 1 # keep default; adapter fires every evaluation if model is None: model_function = self._make_model( @@ -166,12 +164,10 @@ def fit( self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results, weights) except FitError: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise except Exception as e: - for key in self._cached_pars.keys(): - self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._restore_parameter_values() raise FitError(e) return results diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 5dbcfedf..2ce11956 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -242,6 +242,46 @@ def test_gen_fit_results(self, minimizer: Bumps, monkeypatch): 'x', minimizer_parameters={'ppar_1': 'par_value_1', 'ppar_2': 'par_value_2'} ) + @pytest.mark.parametrize( + 'n_evaluations, max_evaluations, expected_success', + [ + (1, 3, True), # last step (1) < budget-1 (2) => success + (2, 3, False), # last step (2) == budget-1 (2) => budget consumed => failure + (3, 3, False), # last step (3) > budget-1 (2) => failure + (0, 1, False), # 0 >= 0 => failure (budget of 1, step counter 0-indexed) + (5, None, True), # no budget => always success + ], + ) + def test_gen_fit_results_max_evaluations_boundary( + self, minimizer: Bumps, monkeypatch, n_evaluations, max_evaluations, expected_success + ): + """Bumps step counter is 0-indexed so the last step of a budget + of N 
is N-1. Verify the boundary condition in _gen_fit_results.""" + mock_domain_fit_results = MagicMock() + monkeypatch.setattr( + easyscience.fitting.minimizers.minimizer_bumps, + 'FitResults', + MagicMock(return_value=mock_domain_fit_results), + ) + + mock_cached_model = MagicMock() + mock_cached_model.pars = {'ppar_1': 0} + minimizer._cached_model = mock_cached_model + + mock_par = MagicMock() + mock_par.value = 1.0 + minimizer._cached_pars = {'par_1': mock_par} + minimizer._p_0 = 'p_0' + minimizer.evaluate = MagicMock(return_value='evaluate') + + mock_driver = MagicMock() + + minimizer._gen_fit_results( + np.array([1.0]), 0.5, mock_driver, n_evaluations, max_evaluations + ) + + assert mock_domain_fit_results.success is expected_success + def test_resolve_fitclass_valid(self, minimizer: Bumps) -> None: # When Then fitclass = Bumps._resolve_fitclass('lm') From 78589540eecb05e64f2e57fef2ac85d5f9635582 Mon Sep 17 00:00:00 2001 From: Piotr Rozyczko Date: Wed, 15 Apr 2026 21:59:05 +0200 Subject: [PATCH 07/11] assure fit cancellation enables undo --- src/easyscience/fitting/minimizers/minimizer_bumps.py | 2 ++ src/easyscience/fitting/minimizers/minimizer_dfo.py | 2 ++ src/easyscience/fitting/minimizers/minimizer_lmfit.py | 2 ++ tests/unit/fitting/minimizers/test_minimizer_bumps.py | 3 ++- tests/unit/fitting/minimizers/test_minimizer_dfo.py | 3 ++- tests/unit/fitting/minimizers/test_minimizer_lmfit.py | 3 ++- 6 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 3f702709..c730c49f 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -220,6 +220,8 @@ def fit( except Exception as e: self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results @staticmethod diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index dbcd6f69..bd4124f3 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -169,6 +169,8 @@ def fit( except Exception as e: self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results def convert_to_pars_obj(self, par_list: Optional[list] = None): diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index adb0bf54..37bb6113 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -166,6 +166,8 @@ def fit( except Exception as e: self._restore_parameter_values() raise FitError(e) + finally: + global_object.stack.enabled = stack_status return results def _create_iter_callback( diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 2ce11956..364609cd 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -356,7 +356,7 @@ def test_fit_cancellation_restores_parameter_values( # When from easyscience import global_object - global_object.stack.enabled = False + global_object.stack.enabled = True from easyscience.variable import Parameter @@ -401,6 +401,7 @@ def fit_side_effect(*args, **kwargs): minimizer.fit(x=1.0, y=2.0, weights=1, 
progress_callback=MagicMock(return_value=False)) assert parameter.value == 1.0 + assert global_object.stack.enabled is True def test_build_progress_payload(self, minimizer: Bumps) -> None: # When diff --git a/tests/unit/fitting/minimizers/test_minimizer_dfo.py b/tests/unit/fitting/minimizers/test_minimizer_dfo.py index 5aa711d1..0842b895 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_dfo.py +++ b/tests/unit/fitting/minimizers/test_minimizer_dfo.py @@ -487,7 +487,7 @@ def test_fit_generic_exception_resets_parameters_and_raises_fit_error( parameter values to cached originals and re-raise as FitError.""" from easyscience import global_object - global_object.stack.enabled = False + global_object.stack.enabled = True mock_model = MagicMock() mock_model_function = MagicMock(return_value=mock_model) @@ -506,6 +506,7 @@ def test_fit_generic_exception_resets_parameters_and_raises_fit_error( assert cached_par_1.value == 1.0 assert cached_par_2.value == 2.0 + assert global_object.stack.enabled is True def test_dfo_fit_exception(self, minimizer: DFO, monkeypatch): # When diff --git a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py index 82fd6648..bb2c6458 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py +++ b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py @@ -236,7 +236,7 @@ def test_fit_cancellation_restores_parameter_values(self, minimizer: LMFit) -> N # When from easyscience import global_object - global_object.stack.enabled = False + global_object.stack.enabled = True parameter = MagicMock(Parameter) parameter.value = 10.0 @@ -259,6 +259,7 @@ def fit_side_effect(*args, **kwargs): minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=MagicMock(return_value=False)) assert parameter.value == 1.0 + assert global_object.stack.enabled is True def test_build_progress_payload(self, minimizer: LMFit) -> None: # When From 665e99eda623cb4c8dcb53da77f5f882f357bc91 Mon Sep 17 00:00:00 2001 From: rozyczko Date: Tue, 21 Apr 2026 08:45:32 +0200 Subject: [PATCH 08/11] removed fit cancellation from the PR. 
This one is only about the interim updates --- .../fitting/minimizers/minimizer_bumps.py | 18 +---- .../fitting/minimizers/minimizer_lmfit.py | 8 +- src/easyscience/fitting/minimizers/utils.py | 5 -- .../fitting/minimizers/test_minimizer_base.py | 4 - .../minimizers/test_minimizer_bumps.py | 74 ------------------- .../minimizers/test_minimizer_lmfit.py | 40 ++-------- 6 files changed, 9 insertions(+), 140 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index c730c49f..6faa9a31 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -23,7 +23,6 @@ from ..available_minimizers import AvailableMinimizers from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase -from .utils import FitCancelled from .utils import FitError from .utils import FitResults @@ -52,7 +51,6 @@ def __init__(self, owner, problem, callback): self._owner = owner self._problem = problem self._callback = callback - self.cancel_requested = False def config_history(self, history): history.requires(step=1, point=1, value=1) @@ -64,9 +62,7 @@ def __call__(self, history): point=np.asarray(history.point[0]), nllf=float(history.value[0]), ) - should_continue = self._callback(payload) - if should_continue is False: - self.cancel_requested = True + self._callback(payload) class Bumps(MinimizerBase): @@ -182,19 +178,14 @@ def fit( step_counter = _StepCounterMonitor() monitors = [step_counter] - progress_monitor = None if progress_callback is not None: - progress_monitor = _BumpsProgressMonitor(self, problem, progress_callback) - monitors.append(progress_monitor) - - abort_test = (lambda: progress_monitor.cancel_requested) if progress_monitor else None + monitors.append(_BumpsProgressMonitor(self, problem, progress_callback)) mapper = SerialMapper.start_mapper(problem, []) driver = FitDriver( fitclass=fitclass, problem=problem, monitors=monitors, - abort_test=abort_test, mapper=mapper, **minimizer_kwargs, ) @@ -208,15 +199,10 @@ def fit( try: x_result, fx = driver.fit() - if progress_monitor is not None and progress_monitor.cancel_requested: - raise FitCancelled() self._set_parameter_fit_result(x_result, driver, stack_status, problem._parameters) results = self._gen_fit_results( x_result, fx, driver, step_counter.last_step, max_evaluations ) - except FitCancelled: - self._restore_parameter_values() - raise except Exception as e: self._restore_parameter_values() raise FitError(e) diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index 37bb6113..acdf4ffe 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -18,7 +18,6 @@ from ..available_minimizers import AvailableMinimizers from .minimizer_base import MINIMIZER_PARAMETER_PREFIX from .minimizer_base import MinimizerBase -from .utils import FitCancelled from .utils import FitError from .utils import FitResults @@ -160,9 +159,6 @@ def fit( ) self._set_parameter_fit_result(model_results, stack_status) results = self._gen_fit_results(model_results) - except FitCancelled: - self._restore_parameter_values() - raise except Exception as e: self._restore_parameter_values() raise FitError(e) @@ -179,9 +175,7 @@ def _create_iter_callback( def iter_cb(params, iteration: int, residuals: np.ndarray, *args, **kwargs) -> bool: payload = 
self._build_progress_payload(params, iteration, residuals) - should_continue = progress_callback(payload) - if should_continue is False: - raise FitCancelled() + progress_callback(payload) return False return iter_cb diff --git a/src/easyscience/fitting/minimizers/utils.py b/src/easyscience/fitting/minimizers/utils.py index da1ab517..c6d462fe 100644 --- a/src/easyscience/fitting/minimizers/utils.py +++ b/src/easyscience/fitting/minimizers/utils.py @@ -101,8 +101,3 @@ def __str__(self) -> str: if self.e is not None: s = f'{self.e}\n' return s + 'Something has gone wrong with the fit' - - -class FitCancelled(Exception): - def __str__(self) -> str: - return 'Fit cancelled by progress callback' diff --git a/tests/unit/fitting/minimizers/test_minimizer_base.py b/tests/unit/fitting/minimizers/test_minimizer_base.py index 2411998d..86ec4292 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_base.py +++ b/tests/unit/fitting/minimizers/test_minimizer_base.py @@ -10,7 +10,6 @@ from easyscience import Parameter from easyscience.fitting.minimizers.minimizer_base import MinimizerBase -from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -214,6 +213,3 @@ def test_get_method_dict_not_supported_method(self, minimizer: MinimizerBase) -> # Then Expect with pytest.raises(FitError): result = minimizer._get_method_kwargs('not_supported_method') - - def test_fit_cancelled_string(self) -> None: - assert str(FitCancelled()) == 'Fit cancelled by progress callback' diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 364609cd..cdda289b 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -12,7 +12,6 @@ from easyscience.fitting.minimizers.minimizer_bumps import Bumps from easyscience.fitting.minimizers.minimizer_bumps import _BumpsProgressMonitor from easyscience.fitting.minimizers.minimizer_bumps import _StepCounterMonitor -from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -350,59 +349,6 @@ def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: assert isinstance(monitors[0], _StepCounterMonitor) assert isinstance(monitors[1], _BumpsProgressMonitor) - def test_fit_cancellation_restores_parameter_values( - self, minimizer: Bumps, monkeypatch - ) -> None: - # When - from easyscience import global_object - - global_object.stack.enabled = True - - from easyscience.variable import Parameter - - parameter = MagicMock(Parameter) - parameter.value = 10.0 - minimizer._cached_pars = {'alpha': parameter} - minimizer._cached_pars_vals = {'alpha': (1.0, None)} - - mock_driver_instance = MagicMock() - - def fit_side_effect(*args, **kwargs): - # Simulate the monitor triggering cancellation - raise FitCancelled() - - mock_driver_instance.fit.side_effect = fit_side_effect - mock_driver_instance.clip = MagicMock() - mock_FitDriver = MagicMock(return_value=mock_driver_instance) - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver - ) - - mock_SerialMapper = MagicMock() - mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper - ) - - mock_FitProblem_instance = MagicMock() - mock_FitProblem_instance._parameters = [] - mock_FitProblem = 
MagicMock(return_value=mock_FitProblem_instance) - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'FitProblem', mock_FitProblem - ) - - mock_model = MagicMock() - mock_model_function = MagicMock(return_value=mock_model) - minimizer._make_model = MagicMock(return_value=mock_model_function) - minimizer._resolve_fitclass = MagicMock(return_value=MagicMock(id='amoeba')) - - # Then Expect - with pytest.raises(FitCancelled): - minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=MagicMock(return_value=False)) - - assert parameter.value == 1.0 - assert global_object.stack.enabled is True - def test_build_progress_payload(self, minimizer: Bumps) -> None: # When mock_problem = MagicMock() @@ -503,7 +449,6 @@ def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: monitor(mock_history) # Expect - assert not monitor.cancel_requested callback.assert_called_once_with({'iteration': 1}) minimizer._build_progress_payload.assert_called_once_with( problem=mock_problem, @@ -512,25 +457,6 @@ def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: nllf=42.0, ) - def test_bumps_progress_monitor_cancel(self, minimizer: Bumps) -> None: - # When - callback = MagicMock(return_value=False) - mock_problem = MagicMock() - - monitor = _BumpsProgressMonitor(minimizer, mock_problem, callback) - minimizer._build_progress_payload = MagicMock(return_value={'iteration': 1}) - - mock_history = MagicMock() - mock_history.step = [5] - mock_history.point = [np.array([1.0])] - mock_history.value = [42.0] - - # Then - monitor(mock_history) - - # Expect - assert monitor.cancel_requested is True - def test_fit_exception_restores_values(self, minimizer: Bumps, monkeypatch) -> None: # When from easyscience import global_object diff --git a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py index bb2c6458..6e43c08f 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_lmfit.py +++ b/tests/unit/fitting/minimizers/test_minimizer_lmfit.py @@ -10,7 +10,6 @@ import easyscience.fitting.minimizers.minimizer_lmfit from easyscience import Parameter from easyscience.fitting.minimizers.minimizer_lmfit import LMFit -from easyscience.fitting.minimizers.utils import FitCancelled from easyscience.fitting.minimizers.utils import FitError @@ -222,44 +221,17 @@ def test_create_iter_callback_no_callback(self, minimizer: LMFit) -> None: # When Then Expect assert minimizer._create_iter_callback(None) is None - def test_create_iter_callback_abort(self, minimizer: LMFit) -> None: + def test_create_iter_callback_invokes_progress(self, minimizer: LMFit) -> None: # When progress_callback = MagicMock(return_value=False) iter_cb = minimizer._create_iter_callback(progress_callback) - # Then Expect - with pytest.raises(FitCancelled): - iter_cb(MagicMock(), 5, np.array([1.0, -2.0])) - progress_callback.assert_called_once() - - def test_fit_cancellation_restores_parameter_values(self, minimizer: LMFit) -> None: - # When - from easyscience import global_object - - global_object.stack.enabled = True - - parameter = MagicMock(Parameter) - parameter.value = 10.0 - minimizer._cached_pars = {'alpha': parameter} - minimizer._cached_pars_vals = {'alpha': (1.0, None)} - - mock_model = MagicMock() - mock_lm_parameter = MagicMock() - mock_lm_parameter.value = 2.0 - mock_lm_parameter.vary = True - - def fit_side_effect(*args, **kwargs): - kwargs['iter_cb']({'palpha': mock_lm_parameter}, 1, np.array([1.0, -2.0])) - - mock_model.fit.side_effect 
= fit_side_effect - minimizer._make_model = MagicMock(return_value=mock_model) - - # Then Expect - with pytest.raises(FitCancelled): - minimizer.fit(x=1.0, y=2.0, weights=1, progress_callback=MagicMock(return_value=False)) + # Then + result = iter_cb(MagicMock(), 5, np.array([1.0, -2.0])) - assert parameter.value == 1.0 - assert global_object.stack.enabled is True + # Expect — progress callback is notified, but its return value is ignored + progress_callback.assert_called_once() + assert result is False def test_build_progress_payload(self, minimizer: LMFit) -> None: # When From be61cb9235253b8cd95ab9e53822f244bef27820 Mon Sep 17 00:00:00 2001 From: rozyczko Date: Tue, 21 Apr 2026 14:40:16 +0200 Subject: [PATCH 09/11] PR review fixes --- .../fitting/minimizers/minimizer_base.py | 20 +++---- .../fitting/minimizers/minimizer_bumps.py | 58 +++++++++---------- .../fitting/minimizers/minimizer_dfo.py | 45 +++++++------- .../fitting/minimizers/minimizer_lmfit.py | 31 +++++----- .../minimizers/test_minimizer_bumps.py | 47 ++++++--------- .../fitting/minimizers/test_minimizer_dfo.py | 8 ++- 6 files changed, 101 insertions(+), 108 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_base.py b/src/easyscience/fitting/minimizers/minimizer_base.py index 67e12abd..35527a12 100644 --- a/src/easyscience/fitting/minimizers/minimizer_base.py +++ b/src/easyscience/fitting/minimizers/minimizer_base.py @@ -9,7 +9,6 @@ from typing import Callable from typing import Dict from typing import List -from typing import Optional from typing import Tuple from typing import Union @@ -61,6 +60,7 @@ def name(self) -> str: def _restore_parameter_values(self) -> None: for key in self._cached_pars.keys(): self._cached_pars[key].value = self._cached_pars_vals[key][0] + self._cached_pars[key].error = self._cached_pars_vals[key][1] @abstractmethod def fit( @@ -68,12 +68,12 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[Parameter] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, **kwargs, ) -> FitResults: """Perform a fit using the engine. @@ -93,7 +93,7 @@ def fit( """ def evaluate( - self, x: np.ndarray, minimizer_parameters: Optional[dict[str, float]] = None, **kwargs + self, x: np.ndarray, minimizer_parameters: dict[str, float] | None = None, **kwargs ) -> np.ndarray: """Evaluate the fit function for values of x. Parameters used are either the latest or user supplied. 
If the parameters are @@ -122,7 +122,7 @@ def evaluate( return self._fit_function(x, **minimizer_parameters, **kwargs) - def _get_method_kwargs(self, passed_method: Optional[str] = None) -> dict[str, str]: + def _get_method_kwargs(self, passed_method: str | None = None) -> dict[str, str]: if passed_method is not None: if passed_method not in self.supported_methods(): raise FitError(f'Method {passed_method} not available in {self.__class__}') @@ -134,7 +134,7 @@ def _get_method_kwargs(self, passed_method: Optional[str] = None) -> dict[str, s return {} @abstractmethod - def convert_to_pars_obj(self, par_list: Optional[Union[list]] = None): + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None): """Create an engine compatible container with the `Parameters` converted from the base object. diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 6faa9a31..8f757e2b 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -4,13 +4,11 @@ import copy from typing import Callable from typing import List -from typing import Optional import numpy as np from bumps.fitters import FIT_AVAILABLE_IDS from bumps.fitters import FITTERS from bumps.fitters import FitDriver -from bumps.mapper import SerialMapper from bumps.monitor import Monitor from bumps.names import Curve from bumps.names import FitProblem @@ -47,16 +45,16 @@ def __call__(self, history): class _BumpsProgressMonitor(Monitor): - def __init__(self, owner, problem, callback): - self._owner = owner + def __init__(self, problem, callback, payload_builder): self._problem = problem self._callback = callback + self._payload_builder = payload_builder def config_history(self, history): history.requires(step=1, point=1, value=1) def __call__(self, history): - payload = self._owner._build_progress_payload( + payload = self._payload_builder( problem=self._problem, iteration=int(history.step[0]), point=np.asarray(history.point[0]), @@ -77,7 +75,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the fitting engine with a `ObjBase` and an arbitrary fitting function. @@ -107,14 +105,14 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[Parameter] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, - minimizer_kwargs: Optional[dict] = None, - engine_kwargs: Optional[dict] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + minimizer_kwargs: dict | None = None, + engine_kwargs: dict | None = None, **kwargs, ) -> FitResults: """Perform a fit using the BUMPS engine. 
@@ -159,8 +157,10 @@ def fit( minimizer_kwargs.update(engine_kwargs) if tolerance is not None: - minimizer_kwargs['ftol'] = tolerance - minimizer_kwargs['xtol'] = tolerance + minimizer_kwargs['ftol'] = tolerance # tolerance for change in function value + minimizer_kwargs['xtol'] = ( + tolerance # tolerance for change in parameter value, could be an independent value + ) if max_evaluations is not None: minimizer_kwargs['steps'] = max_evaluations @@ -179,14 +179,16 @@ def fit( step_counter = _StepCounterMonitor() monitors = [step_counter] if progress_callback is not None: - monitors.append(_BumpsProgressMonitor(self, problem, progress_callback)) + if not callable(progress_callback): + raise ValueError("progress_callback must be callable") + monitors.append( + _BumpsProgressMonitor(problem, progress_callback, self._build_progress_payload) + ) - mapper = SerialMapper.start_mapper(problem, []) driver = FitDriver( fitclass=fitclass, problem=problem, monitors=monitors, - mapper=mapper, **minimizer_kwargs, ) driver.clip() @@ -221,11 +223,9 @@ def _build_progress_payload( self, problem, iteration: int, point: np.ndarray, nllf: float ) -> dict: # Use the nllf already computed by the fitter to avoid a costly - # model re-evaluation. For Gaussian likelihoods: - # nllf = sum(residuals**2) / 2 => chi2 = 2 * nllf - chi2 = 2.0 * nllf - dof = problem.dof - reduced_chi2 = chi2 / dof if dof > 0 else chi2 + # model re-evaluation, and let BUMPS apply its own chisq scaling. + chi2 = float(problem.chisq(nllf=nllf, norm=False)) + reduced_chi2 = float(problem.chisq(nllf=nllf, norm=True)) parameter_values = self._current_parameter_snapshot(problem, point) @@ -247,7 +247,7 @@ def _current_parameter_snapshot(self, problem, point: np.ndarray) -> dict: snapshot[dict_name] = float(value) return snapshot - def convert_to_pars_obj(self, par_list: Optional[List] = None) -> List[BumpsParameter]: + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None) -> List[BumpsParameter]: """Create a container with the `Parameters` converted from the base object. @@ -282,7 +282,7 @@ def convert_to_par_object(obj) -> BumpsParameter: fixed=obj.fixed, ) - def _make_model(self, parameters: Optional[List[BumpsParameter]] = None) -> Callable: + def _make_model(self, parameters: List[BumpsParameter] | None = None) -> Callable: """Generate a bumps model from the supplied `fit_function` and parameters in the base object. Note that this makes a callable as it needs to be initialized with *x*, *y*, *weights* @@ -334,9 +334,7 @@ def _set_parameter_fit_result( stderr = driver.stderr() if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') @@ -353,7 +351,7 @@ def _gen_fit_results( fx: float, driver: FitDriver, n_evaluations: int = 0, - max_evaluations: Optional[int] = None, + max_evaluations: int | None = None, **kwargs, ) -> FitResults: """Convert fit results into the unified `FitResults` format. 
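For orientation, the progress_callback contract exercised by the LMFit, Bumps and DFO adapters in this series is a plain dict with the keys 'iteration', 'chi2', 'reduced_chi2', 'parameter_values', 'refresh_plots' and 'finished'. A minimal sketch of a consumer follows, assuming a GUI-side hook; the name log_progress is hypothetical and not part of these patches.

# Illustrative sketch, assuming the payload keys listed above; log_progress is
# a hypothetical consumer, not part of the easyscience API.
def log_progress(payload: dict) -> None:
    # Keys populated by the minimizer adapters: iteration, chi2, reduced_chi2,
    # parameter_values, refresh_plots, finished.
    print(
        f"iteration {payload['iteration']}: "
        f"chi2={payload['chi2']:.4g}, reduced_chi2={payload['reduced_chi2']:.4g}"
    )
    for name, value in payload['parameter_values'].items():
        print(f'  {name} = {value:.4g}')

# Usage, e.g.: minimizer.fit(x=x, y=y, weights=weights, progress_callback=log_progress)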
diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index bd4124f3..6b35dedf 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -2,10 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause from dataclasses import dataclass +from numbers import Integral +import warnings from typing import Callable from typing import Dict from typing import List -from typing import Optional import dfols import numpy as np @@ -47,7 +48,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the fitting engine with a `ObjBase` and an arbitrary fitting function. @@ -75,13 +76,13 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray, - model: Optional[Callable] = None, - parameters: Optional[List[Parameter]] = None, - method: str = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, - callback: Optional[Callable[[DFOCallbackState], None]] = None, + model: Callable | None = None, + parameters: List[Parameter] | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + callback: Callable[[DFOCallbackState], None] | None = None, callback_every: int = 1, callback_on_improvement_only: bool = False, **kwargs, @@ -121,6 +122,9 @@ def fit( if (weights <= 0).any(): raise ValueError('Weights must be strictly positive and non-zero.') + if not isinstance(callback_every, Integral) or isinstance(callback_every, bool): + raise ValueError('callback_every must be a positive integer.') + if callback_every < 1: raise ValueError('callback_every must be a positive integer.') @@ -173,7 +177,7 @@ def fit( global_object.stack.enabled = stack_status return results - def convert_to_pars_obj(self, par_list: Optional[list] = None): + def convert_to_pars_obj(self, par_list: List[Parameter] | None = None): """Required by interface but not needed for DFO-LS.""" pass @@ -184,8 +188,8 @@ def convert_to_par_object(obj) -> None: def _make_model( self, - parameters: Optional[List[Parameter]] = None, - callback: Optional[Callable[[DFOCallbackState], None]] = None, + parameters: List[Parameter] | None = None, + callback: Callable[[DFOCallbackState], None] | None = None, callback_every: int = 1, callback_on_improvement_only: bool = False, ) -> Callable: @@ -226,7 +230,7 @@ def _residuals(pars_values: List[float]) -> np.ndarray: return _outer(self) def _get_callback_parameter_names( - self, parameters: Optional[List[Parameter]] = None + self, parameters: List[Parameter] | None = None ) -> list[str]: if parameters is not None: return [MINIMIZER_PARAMETER_PREFIX + parameter.unique_name for parameter in parameters] @@ -236,7 +240,7 @@ def _get_callback_parameter_names( def _wrap_model_with_callback( model: Callable, parameter_names: list[str], - callback: Optional[Callable[[DFOCallbackState], None]], + callback: Callable[[DFOCallbackState], None] | None, callback_every: int, callback_on_improvement_only: bool, ) -> Callable: @@ -288,7 +292,7 @@ def wrapped_model(pars_values: List[float]) -> np.ndarray: @staticmethod def _make_progress_adapter( - progress_callback: Callable[[dict], 
Optional[bool]], + progress_callback: Callable[[dict], bool | None], dof: int, ) -> Callable[['DFOCallbackState'], None]: """Create a DFO callback that translates DFOCallbackState into @@ -333,9 +337,7 @@ def _set_parameter_fit_result(self, fit_result, stack_status, ci: float = 0.95) pars = self._cached_pars if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') @@ -373,6 +375,9 @@ def _gen_fit_results(self, fit_results, weights, **kwargs) -> FitResults: results.y_err = weights results.n_evaluations = int(fit_results.nf) results.message = str(fit_results.msg) + if not results.success: + warning_message = results.message or 'DFO fit did not succeed.' + warnings.warn(warning_message, UserWarning, stacklevel=2) # results.residual = results.y_obs - results.y_calc # results.goodness_of_fit = fit_results.f @@ -419,8 +424,8 @@ def _dfo_fit( @staticmethod def _prepare_kwargs( - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, + tolerance: float | None = None, + max_evaluations: int | None = None, **kwargs, ) -> dict[str:str]: if max_evaluations is not None: diff --git a/src/easyscience/fitting/minimizers/minimizer_lmfit.py b/src/easyscience/fitting/minimizers/minimizer_lmfit.py index acdf4ffe..dbaea071 100644 --- a/src/easyscience/fitting/minimizers/minimizer_lmfit.py +++ b/src/easyscience/fitting/minimizers/minimizer_lmfit.py @@ -3,7 +3,6 @@ from typing import Callable from typing import List -from typing import Optional import numpy as np from lmfit import Model as LMModel @@ -34,7 +33,7 @@ def __init__( self, obj, #: ObjBase, fit_function: Callable, - minimizer_enum: Optional[AvailableMinimizers] = None, + minimizer_enum: AvailableMinimizers | None = None, ): # todo after constraint changes, add type hint: obj: ObjBase # noqa: E501 """Initialize the minimizer with the `ObjBase` and the `fit_function` to be used. @@ -81,14 +80,14 @@ def fit( x: np.ndarray, y: np.ndarray, weights: np.ndarray = None, - model: Optional[LMModel] = None, - parameters: Optional[LMParameters] = None, - method: Optional[str] = None, - tolerance: Optional[float] = None, - max_evaluations: Optional[int] = None, - progress_callback: Optional[Callable[[dict], Optional[bool]]] = None, - minimizer_kwargs: Optional[dict] = None, - engine_kwargs: Optional[dict] = None, + model: LMModel | None = None, + parameters: LMParameters | None = None, + method: str | None = None, + tolerance: float | None = None, + max_evaluations: int | None = None, + progress_callback: Callable[[dict], bool | None] | None = None, + minimizer_kwargs: dict | None = None, + engine_kwargs: dict | None = None, **kwargs, ) -> FitResults: """Perform a fit using the lmfit engine. 
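The per-evaluation callback added to the DFO-LS engine can be driven as in the sketch below; dfo_minimizer, x, y and weights are placeholder names, and only the state fields exercised by the tests in this series (evaluation, xk) are assumed.

    import numpy as np

    # Placeholder data and minimizer names, for illustration only.
    def log_state(state) -> None:
        # DFOCallbackState exposes at least the evaluation counter and the
        # current parameter vector (state.evaluation, state.xk).
        print(f'evaluation {state.evaluation}: xk = {np.asarray(state.xk)}')

    dfo_minimizer.fit(
        x=x,
        y=y,
        weights=weights,
        callback=log_state,
        callback_every=5,  # must be a positive integer; bools and floats are rejected
        callback_on_improvement_only=False,
    )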
@@ -168,8 +167,8 @@ def fit( def _create_iter_callback( self, - progress_callback: Optional[Callable[[dict], Optional[bool]]], - ) -> Optional[Callable]: + progress_callback: Callable[[dict], bool | None] | None, + ) -> Callable | None: if progress_callback is None: return None @@ -218,7 +217,7 @@ def _get_fit_kws( minimizer_kwargs['tol'] = tolerance return minimizer_kwargs - def convert_to_pars_obj(self, parameters: Optional[List[Parameter]] = None) -> LMParameters: + def convert_to_pars_obj(self, parameters: List[Parameter] | None = None) -> LMParameters: """Create an lmfit compatible container with the `Parameters` converted from the base object. @@ -254,7 +253,7 @@ def convert_to_par_object(parameter: Parameter) -> LMParameter: brute_step=None, ) - def _make_model(self, pars: Optional[LMParameters] = None) -> LMModel: + def _make_model(self, pars: LMParameters | None = None) -> LMModel: """Generate a lmfit model from the supplied `fit_function` and parameters in the base object. @@ -304,9 +303,7 @@ def _set_parameter_fit_result(self, fit_result: ModelResult, stack_status: bool) pars = self._cached_pars if stack_status: - for name in pars.keys(): - pars[name].value = self._cached_pars_vals[name][0] - pars[name].error = self._cached_pars_vals[name][1] + self._restore_parameter_values() global_object.stack.enabled = True global_object.stack.beginMacro('Fitting routine') for name in pars.keys(): diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index cdda289b..2d811911 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -61,12 +61,6 @@ def test_fit(self, minimizer: Bumps, monkeypatch) -> None: easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver ) - mock_SerialMapper = MagicMock() - mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper - ) - # Prepare a mock parameter with .name = 'pmock_parm_1' mock_bumps_param = MagicMock() mock_bumps_param.name = 'pmock_parm_1' @@ -310,12 +304,6 @@ def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver ) - mock_SerialMapper = MagicMock() - mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper - ) - mock_bumps_param = MagicMock() mock_bumps_param.name = 'pmock_parm_1' mock_FitProblem_instance = MagicMock() @@ -348,16 +336,19 @@ def test_fit_progress_callback(self, minimizer: Bumps, monkeypatch) -> None: assert len(monitors) == 2 assert isinstance(monitors[0], _StepCounterMonitor) assert isinstance(monitors[1], _BumpsProgressMonitor) + assert monitors[1]._problem is mock_FitProblem_instance + assert monitors[1]._callback is progress_callback + assert monitors[1]._payload_builder == minimizer._build_progress_payload def test_build_progress_payload(self, minimizer: Bumps) -> None: # When mock_problem = MagicMock() - mock_problem.dof = 2 + mock_problem.chisq.side_effect = [25.0, 12.5] mock_problem.labels.return_value = ['palpha', 'pbeta'] mock_problem.getp.return_value = np.array([1.0, 2.0]) point = np.array([1.0, 2.0]) - nllf = 12.5 # chi2 = 2 * 12.5 = 25.0; reduced = 25.0 / 2 = 12.5 + nllf = 12.5 # Then payload = 
minimizer._build_progress_payload(mock_problem, 7, point, nllf) @@ -371,13 +362,15 @@ def test_build_progress_payload(self, minimizer: Bumps) -> None: 'refresh_plots': False, 'finished': False, } + mock_problem.chisq.assert_any_call(nllf=nllf, norm=False) + mock_problem.chisq.assert_any_call(nllf=nllf, norm=True) # setp should NOT be called – the monitor avoids model re-evaluation mock_problem.setp.assert_not_called() def test_build_progress_payload_keys_match_lmfit(self, minimizer: Bumps) -> None: # When mock_problem = MagicMock() - mock_problem.dof = 2 + mock_problem.chisq.side_effect = [10.0, 5.0] mock_problem.labels.return_value = ['pa'] mock_problem.getp.return_value = np.array([5.0]) @@ -404,9 +397,9 @@ def test_build_progress_payload_keys_match_lmfit(self, minimizer: Bumps) -> None assert payload['finished'] is False def test_build_progress_payload_reduced_chi2_positive_dof(self, minimizer: Bumps) -> None: - # When - dof = 2, nllf = 5.0 -> chi2 = 10.0 -> reduced = 5.0 + # When - use BUMPS chisq helpers for raw and normalized values mock_problem = MagicMock() - mock_problem.dof = 2 + mock_problem.chisq.side_effect = [10.0, 5.0] mock_problem.labels.return_value = ['pa'] mock_problem.getp.return_value = np.array([5.0]) @@ -416,8 +409,12 @@ def test_build_progress_payload_reduced_chi2_positive_dof(self, minimizer: Bumps payload = minimizer._build_progress_payload(mock_problem, 1, np.array([5.0]), nllf=5.0) # Expect - assert payload['chi2'] == 10.0 # 2 * nllf - assert payload['reduced_chi2'] == 5.0 # 10.0 / 2 + assert payload['chi2'] == 10.0 + assert payload['reduced_chi2'] == 5.0 + assert mock_problem.chisq.call_args_list == [ + (( ), {'nllf': 5.0, 'norm': False}), + (( ), {'nllf': 5.0, 'norm': True}), + ] def test_current_parameter_snapshot(self, minimizer: Bumps) -> None: # When @@ -436,9 +433,9 @@ def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: # When callback = MagicMock(return_value=True) mock_problem = MagicMock() + payload_builder = MagicMock(return_value={'iteration': 1}) - monitor = _BumpsProgressMonitor(minimizer, mock_problem, callback) - minimizer._build_progress_payload = MagicMock(return_value={'iteration': 1}) + monitor = _BumpsProgressMonitor(mock_problem, callback, payload_builder) mock_history = MagicMock() mock_history.step = [5] @@ -450,7 +447,7 @@ def test_bumps_progress_monitor_calls_callback(self, minimizer: Bumps) -> None: # Expect callback.assert_called_once_with({'iteration': 1}) - minimizer._build_progress_payload.assert_called_once_with( + payload_builder.assert_called_once_with( problem=mock_problem, iteration=5, point=ANY, @@ -478,12 +475,6 @@ def test_fit_exception_restores_values(self, minimizer: Bumps, monkeypatch) -> N easyscience.fitting.minimizers.minimizer_bumps, 'FitDriver', mock_FitDriver ) - mock_SerialMapper = MagicMock() - mock_SerialMapper.start_mapper = MagicMock(return_value='mapper') - monkeypatch.setattr( - easyscience.fitting.minimizers.minimizer_bumps, 'SerialMapper', mock_SerialMapper - ) - mock_FitProblem_instance = MagicMock() mock_FitProblem_instance._parameters = [] mock_FitProblem = MagicMock(return_value=mock_FitProblem_instance) diff --git a/tests/unit/fitting/minimizers/test_minimizer_dfo.py b/tests/unit/fitting/minimizers/test_minimizer_dfo.py index 0842b895..66be2849 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_dfo.py +++ b/tests/unit/fitting/minimizers/test_minimizer_dfo.py @@ -248,14 +248,15 @@ def test_make_model_callback_every(self, minimizer: DFO) -> None: assert 
state.evaluation == 2 assert all(state.xk == np.array([1222, 2333])) - def test_fit_callback_every_must_be_positive(self, minimizer: DFO) -> None: + @pytest.mark.parametrize('callback_every', [0, 1.3]) + def test_fit_callback_every_must_be_positive(self, minimizer: DFO, callback_every) -> None: with pytest.raises(ValueError, match='callback_every must be a positive integer'): minimizer.fit( x=np.array([1.0]), y=np.array([1.0]), weights=np.array([1.0]), callback=MagicMock(), - callback_every=0, + callback_every=callback_every, ) def test_set_parameter_fit_result_no_stack_status(self, minimizer: DFO): @@ -367,7 +368,8 @@ def test_gen_fit_results_maxfun_warning_sets_success_false(self, minimizer: DFO, minimizer._p_0 = 'p_0' minimizer.evaluate = MagicMock(return_value='evaluate') - domain_fit_results = minimizer._gen_fit_results(mock_fit_result, 'weights') + with pytest.warns(UserWarning, match='Objective has been called MAXFUN times'): + domain_fit_results = minimizer._gen_fit_results(mock_fit_result, 'weights') assert domain_fit_results.success == False assert domain_fit_results.n_evaluations == 50 From 372de24a5c4a0e6980554014f79d0b67bc1765d4 Mon Sep 17 00:00:00 2001 From: rozyczko Date: Tue, 21 Apr 2026 14:44:27 +0200 Subject: [PATCH 10/11] ruff changes --- src/easyscience/fitting/minimizers/minimizer_bumps.py | 4 ++-- src/easyscience/fitting/minimizers/minimizer_dfo.py | 2 +- tests/unit/fitting/minimizers/test_minimizer_bumps.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/easyscience/fitting/minimizers/minimizer_bumps.py b/src/easyscience/fitting/minimizers/minimizer_bumps.py index 8f757e2b..7fdde0e8 100644 --- a/src/easyscience/fitting/minimizers/minimizer_bumps.py +++ b/src/easyscience/fitting/minimizers/minimizer_bumps.py @@ -157,7 +157,7 @@ def fit( minimizer_kwargs.update(engine_kwargs) if tolerance is not None: - minimizer_kwargs['ftol'] = tolerance # tolerance for change in function value + minimizer_kwargs['ftol'] = tolerance # tolerance for change in function value minimizer_kwargs['xtol'] = ( tolerance # tolerance for change in parameter value, could be an independent value ) @@ -180,7 +180,7 @@ def fit( monitors = [step_counter] if progress_callback is not None: if not callable(progress_callback): - raise ValueError("progress_callback must be callable") + raise ValueError('progress_callback must be callable') monitors.append( _BumpsProgressMonitor(problem, progress_callback, self._build_progress_payload) ) diff --git a/src/easyscience/fitting/minimizers/minimizer_dfo.py b/src/easyscience/fitting/minimizers/minimizer_dfo.py index 6b35dedf..968dca71 100644 --- a/src/easyscience/fitting/minimizers/minimizer_dfo.py +++ b/src/easyscience/fitting/minimizers/minimizer_dfo.py @@ -1,9 +1,9 @@ # SPDX-FileCopyrightText: 2026 EasyScience contributors # SPDX-License-Identifier: BSD-3-Clause +import warnings from dataclasses import dataclass from numbers import Integral -import warnings from typing import Callable from typing import Dict from typing import List diff --git a/tests/unit/fitting/minimizers/test_minimizer_bumps.py b/tests/unit/fitting/minimizers/test_minimizer_bumps.py index 2d811911..2515165d 100644 --- a/tests/unit/fitting/minimizers/test_minimizer_bumps.py +++ b/tests/unit/fitting/minimizers/test_minimizer_bumps.py @@ -412,8 +412,8 @@ def test_build_progress_payload_reduced_chi2_positive_dof(self, minimizer: Bumps assert payload['chi2'] == 10.0 assert payload['reduced_chi2'] == 5.0 assert mock_problem.chisq.call_args_list == [ - (( ), {'nllf': 
5.0, 'norm': False}), - (( ), {'nllf': 5.0, 'norm': True}), + ((), {'nllf': 5.0, 'norm': False}), + ((), {'nllf': 5.0, 'norm': True}), ] def test_current_parameter_snapshot(self, minimizer: Bumps) -> None: From 4fc6d4bbc47fa20f9397a137fa64c8cc3e29a72b Mon Sep 17 00:00:00 2001 From: rozyczko Date: Tue, 21 Apr 2026 15:03:38 +0200 Subject: [PATCH 11/11] added unnecessary exception descriptor --- src/easyscience/fitting/minimizers/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/easyscience/fitting/minimizers/utils.py b/src/easyscience/fitting/minimizers/utils.py index c6d462fe..b44dab88 100644 --- a/src/easyscience/fitting/minimizers/utils.py +++ b/src/easyscience/fitting/minimizers/utils.py @@ -48,7 +48,7 @@ def __repr__(self) -> str: chi2_val = self.chi2 reduced_val = self.reduced_chi2 if not np.isfinite(chi2_val) or not np.isfinite(reduced_val): - raise ValueError + raise ValueError('Chi2 or reduced chi2 is not finite') chi2 = f'{chi2_val:.4g}' reduced = f'{reduced_val:.4g}' except Exception:
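Since _gen_fit_results in the DFO engine now warns when the underlying solver reports failure, a caller can capture that signal directly; the sketch below uses placeholder names (dfo_minimizer, x, y, weights) and relies only on the success and message fields shown in this series.

    import warnings

    # Placeholder objects, for illustration only.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        results = dfo_minimizer.fit(x=x, y=y, weights=weights)

    if not results.success:
        # The UserWarning emitted by _gen_fit_results carries the DFO-LS message
        # (e.g. the MAXFUN text exercised in the test above).
        print('DFO fit did not succeed:', results.message)
        for warning in caught:
            print('warning:', warning.message)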