From f6bc6f320ad9abecf93765713ec87e226affeb36 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 16:04:08 -0500 Subject: [PATCH 01/13] use strategy 0 for user-provided gradient --- src/pyhf/optimize/opt_minuit.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index 7d519887e3..36cda86d7a 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -87,17 +87,22 @@ def _minimize( Minimizer Options: maxiter (:obj:`int`): maximum number of iterations. Default is 100000. return_uncertainties (:obj:`bool`): Return uncertainties on the fitted parameters. Default is off. + strategy: (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is to configure based on `do_grad`. Returns: fitresult (scipy.optimize.OptimizeResult): the fit result """ maxiter = options.pop('maxiter', self.maxiter) return_uncertainties = options.pop('return_uncertainties', False) + # 0: Fast, user-provided gradient + # 1: Default, no user-provided gradient + strategy = options.pop('strategy', not(do_grad)) if options: raise exceptions.Unsupported( f"Unsupported options were passed in: {list(options.keys())}." ) + minimizer.strategy = strategy minimizer.migrad(ncall=maxiter) # Following lines below come from: # https://github.com/scikit-hep/iminuit/blob/22f6ed7146c1d1f3274309656d8c04461dde5ba3/src/iminuit/_minimize.py#L106-L125 From 80b683e840bc92132f6137d5f334ddddd5e223bd Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 16:24:01 -0500 Subject: [PATCH 02/13] add test --- tests/test_optim.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_optim.py b/tests/test_optim.py index ec64fda2a8..0bb1f8a08b 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -177,6 +177,25 @@ def test_minimize_do_grad_autoconfig(mocker, backend, backend_new): assert shim.call_args[1]['do_grad'] != pyhf.tensorlib.default_do_grad +def test_minuit_strategy_do_grad(mocker, backend): + """ + ref: gh#1172 + + When there is a user-provided gradient, check that one automatically sets + the minuit strategy=0. When there is no user-provided gradient, check that + one automatically sets the minuit strategy=1. 
+    """
+    pyhf.set_backend(pyhf.tensorlib, 'minuit')
+    spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize')
+    m = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
+    data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata)
+
+    do_grad = pyhf.tensorlib.default_do_grad
+    pyhf.infer.mle.fit(data, m)
+    assert spy.call_count == 1
+    assert spy.spy_return.minuit.strategy == not do_grad
+
+
 @pytest.mark.parametrize(
     'optimizer',
     [pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer],

From 8cd994632e7a91a86b996f69273cb14ef3e29677 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Wed, 18 Nov 2020 16:25:00 -0500
Subject: [PATCH 03/13] fix test assertion

---
 tests/test_optim.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_optim.py b/tests/test_optim.py
index 0bb1f8a08b..81314ccf0a 100644
--- a/tests/test_optim.py
+++ b/tests/test_optim.py
@@ -193,7 +193,7 @@ def test_minuit_strategy_do_grad(mocker, backend):
     do_grad = pyhf.tensorlib.default_do_grad
     pyhf.infer.mle.fit(data, m)
     assert spy.call_count == 1
-    assert spy.spy_return.minuit.strategy == not do_grad
+    assert not spy.spy_return.minuit.strategy == do_grad


 @pytest.mark.parametrize(

From 48ab33e3ea664e19530930b7ee8441b83b4c5f20 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 18 Nov 2020 21:32:08 +0000
Subject: [PATCH 04/13] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pyhf/optimize/opt_minuit.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index 36cda86d7a..c5707940b0 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -96,7 +96,7 @@ def _minimize(
         return_uncertainties = options.pop('return_uncertainties', False)
         # 0: Fast, user-provided gradient
         # 1: Default, no user-provided gradient
-        strategy = options.pop('strategy', not(do_grad))
+        strategy = options.pop('strategy', not (do_grad))
         if options:
             raise exceptions.Unsupported(
                 f"Unsupported options were passed in: {list(options.keys())}."

From d89ac4a5aeb4f580ee7634ca737f11c4730e91e2 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Wed, 18 Nov 2020 16:48:54 -0500
Subject: [PATCH 05/13] fix up tests slightly...
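
The 'do_grad-minuit-tensorflow-32b' reference values updated below shift
only in the trailing digits: with a user-provided gradient, migrad now
runs at strategy 0 and stops at a marginally different minimum in 32-bit
precision. A minimal sketch of how such reference values can be
regenerated, assuming a pyhf version whose set_backend accepts a
precision argument and reusing the hepdata_like model from the strategy
tests (the actual test_minimize fixture may differ):

    import pyhf

    pyhf.set_backend("tensorflow", "minuit", precision="32b")
    model = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)
    # best-fit [mu, gamma] to compare against the recorded references
    print(pyhf.infer.mle.fit(data, model))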
--- tests/test_optim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_optim.py b/tests/test_optim.py index 81314ccf0a..b09b6f4d76 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -93,7 +93,7 @@ def test_minimize(tensorlib, precision, optimizer, do_grad, do_stitch): 'no_grad-minuit-jax-64b': [0.5000493563528641, 1.0000043833614634], # do grad, minuit, 32b 'do_grad-minuit-pytorch-32b': [0.5017611384391785, 0.9997190237045288], - 'do_grad-minuit-tensorflow-32b': [0.501288652420044, 1.0000219345092773], + 'do_grad-minuit-tensorflow-32b': [0.5012885928153992, 1.0000673532485962], #'do_grad-minuit-jax-32b': [0.5029529333114624, 0.9991086721420288], 'do_grad-minuit-jax-32b': [0.5007095336914062, 0.9999282360076904], # do grad, minuit, 64b From 632fa6db066152964cc55f7ab1f011906a7cdf66 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 17:00:05 -0500 Subject: [PATCH 06/13] remove parens --- src/pyhf/optimize/opt_minuit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index c5707940b0..94e10f9cb5 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -96,7 +96,7 @@ def _minimize( return_uncertainties = options.pop('return_uncertainties', False) # 0: Fast, user-provided gradient # 1: Default, no user-provided gradient - strategy = options.pop('strategy', not (do_grad)) + strategy = options.pop('strategy', not do_grad) if options: raise exceptions.Unsupported( f"Unsupported options were passed in: {list(options.keys())}." From 9f24b5acbd5318b53cddee5e8d6158bb4fe8aed1 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 17:09:09 -0500 Subject: [PATCH 07/13] Update src/pyhf/optimize/opt_minuit.py Co-authored-by: Matthew Feickert --- src/pyhf/optimize/opt_minuit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index 94e10f9cb5..ff2f486f1e 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -87,7 +87,7 @@ def _minimize( Minimizer Options: maxiter (:obj:`int`): maximum number of iterations. Default is 100000. return_uncertainties (:obj:`bool`): Return uncertainties on the fitted parameters. Default is off. - strategy: (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is to configure based on `do_grad`. + strategy: (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is to configure in response to `do_grad`. Returns: fitresult (scipy.optimize.OptimizeResult): the fit result From fb7bc7fb0e3ad2ee8c6f529ca4ee4056fb655f97 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 17:19:07 -0500 Subject: [PATCH 08/13] add the hesse call --- src/pyhf/optimize/opt_minuit.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index ff2f486f1e..9df5085691 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -118,6 +118,8 @@ def _minimize( n = len(x0) hess_inv = default_backend.ones((n, n)) if minimizer.valid: + # Extra call to hesse() after migrad() is always needed for good error estimates. If you pass a user-provided gradient to MINUIT, convergence is faster. 
+ minimizer.hesse() hess_inv = minimizer.np_covariance() unc = None From 4a162ef30efab991c554457b0393156461cd9583 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 17:21:51 -0500 Subject: [PATCH 09/13] add the other tests to check configuration --- tests/test_optim.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_optim.py b/tests/test_optim.py index b09b6f4d76..3d2e978d78 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -195,6 +195,14 @@ def test_minuit_strategy_do_grad(mocker, backend): assert spy.call_count == 1 assert not spy.spy_return.minuit.strategy == do_grad + pyhf.infer.mle.fit(data, m, strategy=0) + assert spy.call_count == 2 + assert spy.spy_return.minuit.strategy == 0 + + pyhf.infer.mle.fit(data, m, strategy=1) + assert spy.call_count == 3 + assert spy.spy_return.minuit.strategy == 1 + @pytest.mark.parametrize( 'optimizer', From 9a2da5755fa4789e4c74d6875f405bdb7a698670 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 18 Nov 2020 17:47:54 -0500 Subject: [PATCH 10/13] better configuration --- src/pyhf/optimize/opt_minuit.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py index 9df5085691..755dca48e9 100644 --- a/src/pyhf/optimize/opt_minuit.py +++ b/src/pyhf/optimize/opt_minuit.py @@ -27,10 +27,12 @@ def __init__(self, *args, **kwargs): Args: errordef (:obj:`float`): See minuit docs. Default is 1.0. steps (:obj:`int`): Number of steps for the bounds. Default is 1000. + strategy (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is None. """ self.name = 'minuit' self.errordef = kwargs.pop('errordef', 1) self.steps = kwargs.pop('steps', 1000) + self.strategy = kwargs.pop('strategy', None) super().__init__(*args, **kwargs) def _get_minimizer( @@ -87,7 +89,7 @@ def _minimize( Minimizer Options: maxiter (:obj:`int`): maximum number of iterations. Default is 100000. return_uncertainties (:obj:`bool`): Return uncertainties on the fitted parameters. Default is off. - strategy: (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is to configure in response to `do_grad`. + strategy (:obj:`int`): See :attr:`iminuit.Minuit.strategy`. Default is to configure in response to `do_grad`. Returns: fitresult (scipy.optimize.OptimizeResult): the fit result @@ -96,7 +98,7 @@ def _minimize( return_uncertainties = options.pop('return_uncertainties', False) # 0: Fast, user-provided gradient # 1: Default, no user-provided gradient - strategy = options.pop('strategy', not do_grad) + strategy = options.pop('strategy', self.strategy if self.strategy else not do_grad) if options: raise exceptions.Unsupported( f"Unsupported options were passed in: {list(options.keys())}." 
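
A note on the lookup order PATCH 10 introduces: a per-fit `strategy`
option takes precedence, then the optimizer-level `self.strategy`, and
only then the do_grad heuristic from PATCH 01. A minimal sketch of the
same resolution logic in isolation (resolve_strategy is a hypothetical
helper for illustration, not pyhf API):

    # hypothetical mirror of the lookup in _minimize, for illustration only
    def resolve_strategy(options, global_strategy, do_grad):
        # per-fit option wins, then the optimizer-level setting,
        # then the do_grad heuristic
        return options.pop(
            'strategy', global_strategy if global_strategy else not do_grad
        )

    assert resolve_strategy({'strategy': 2}, 0, do_grad=True) == 2  # per-fit wins
    assert resolve_strategy({}, 1, do_grad=True) == 1  # optimizer-level setting
    assert resolve_strategy({}, None, do_grad=True) == 0  # user gradient: fast strategy 0
    assert resolve_strategy({}, None, do_grad=False) == 1  # no gradient: default strategy 1
    # a falsy optimizer-level strategy=0 falls through to the heuristic
    assert resolve_strategy({}, 0, do_grad=False) == 1

The last assertion shows a subtlety of the truthiness check: an
optimizer-level strategy=0 is falsy and defers to the do_grad heuristic,
which is exactly the behaviour the global-strategy test in PATCH 12 encodes.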
From 25a55db66268879c1c34f7c3b3d945a845025e08 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 18 Nov 2020 22:48:21 +0000
Subject: [PATCH 11/13] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/pyhf/optimize/opt_minuit.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index 755dca48e9..8a5f9f3934 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -98,7 +98,9 @@ def _minimize(
         return_uncertainties = options.pop('return_uncertainties', False)
         # 0: Fast, user-provided gradient
         # 1: Default, no user-provided gradient
-        strategy = options.pop('strategy', self.strategy if self.strategy else not do_grad)
+        strategy = options.pop(
+            'strategy', self.strategy if self.strategy else not do_grad
+        )
         if options:
             raise exceptions.Unsupported(
                 f"Unsupported options were passed in: {list(options.keys())}."

From 1fae4a1dadc89c3ed74b9a219412672c7c30b7a0 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Wed, 18 Nov 2020 17:53:48 -0500
Subject: [PATCH 12/13] more tests

---
 tests/test_optim.py | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/tests/test_optim.py b/tests/test_optim.py
index 3d2e978d78..bdff96dc1b 100644
--- a/tests/test_optim.py
+++ b/tests/test_optim.py
@@ -204,6 +204,27 @@ def test_minuit_strategy_do_grad(mocker, backend):
     assert spy.spy_return.minuit.strategy == 1


+@pytest.mark.parametrize('strategy', [0, 1])
+def test_minuit_strategy_global(mocker, backend, strategy):
+    pyhf.set_backend(pyhf.tensorlib, pyhf.optimize.minuit_optimizer(strategy=strategy))
+    spy = mocker.spy(pyhf.optimize.minuit_optimizer, '_minimize')
+    m = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
+    data = pyhf.tensorlib.astensor([125.0] + m.config.auxdata)
+
+    do_grad = pyhf.tensorlib.default_do_grad
+    pyhf.infer.mle.fit(data, m)
+    assert spy.call_count == 1
+    assert spy.spy_return.minuit.strategy == (strategy if do_grad else 1)
+
+    pyhf.infer.mle.fit(data, m, strategy=0)
+    assert spy.call_count == 2
+    assert spy.spy_return.minuit.strategy == 0
+
+    pyhf.infer.mle.fit(data, m, strategy=1)
+    assert spy.call_count == 3
+    assert spy.spy_return.minuit.strategy == 1
+
+
 @pytest.mark.parametrize(
     'optimizer',
     [pyhf.optimize.scipy_optimizer, pyhf.optimize.minuit_optimizer],

From fef57e01713531aa0de9c5bead21dd23f4c36ed0 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Wed, 18 Nov 2020 18:01:21 -0500
Subject: [PATCH 13/13] fix slots

---
 src/pyhf/optimize/opt_minuit.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pyhf/optimize/opt_minuit.py b/src/pyhf/optimize/opt_minuit.py
index 8a5f9f3934..20511e35aa 100644
--- a/src/pyhf/optimize/opt_minuit.py
+++ b/src/pyhf/optimize/opt_minuit.py
@@ -10,7 +10,7 @@ class minuit_optimizer(OptimizerMixin):
     Optimizer that uses iminuit.Minuit.migrad.
     """

-    __slots__ = ['name', 'errordef', 'steps']
+    __slots__ = ['name', 'errordef', 'steps', 'strategy']

     def __init__(self, *args, **kwargs):
         """
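
With PATCH 13 in place the series is complete: strategy can be set once
when constructing the optimizer or overridden per fit. A minimal usage
sketch of the final state of this branch (the numpy backend is an
arbitrary choice here; it provides no user gradient, so an unset strategy
would default to 1):

    import pyhf

    # optimizer-level setting: migrad runs at strategy 2 unless overridden
    pyhf.set_backend("numpy", pyhf.optimize.minuit_optimizer(strategy=2))

    model = pyhf.simplemodels.hepdata_like([50.0], [100.0], [10.0])
    data = pyhf.tensorlib.astensor([125.0] + model.config.auxdata)

    pyhf.infer.mle.fit(data, model)  # uses the optimizer-level strategy 2
    pyhf.infer.mle.fit(data, model, strategy=0)  # per-fit override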