
Commit 12d3fe1

variance weighting bugfix need test
tjlane committed Oct 25, 2024
1 parent: d71405a · commit: 12d3fe1
Showing 2 changed files with 13 additions and 35 deletions.
meteor/scale.py (33 changes: 13 additions & 20 deletions)

@@ -41,12 +41,6 @@ def _compute_anisotropic_scale_factors(
     return anisotropic_scale_parameters[0] * np.exp(exponential_argument)
 
 
-def _check_index_consistency(series1: rs.DataSeries, series2: rs.DataSeries) -> None:
-    if not series1.index.equals(series2.index):
-        msg = f"indices of `{series1.name}`, `{series2.name}` differ"
-        raise IndexError(msg)
-
-
 def compute_scale_factors(
     *,
     reference_values: rs.DataSeries,

@@ -94,22 +88,21 @@ def compute_scale_factors(
     [1] SCALEIT https://www.ccp4.ac.uk/html/scaleit.html
     """
     common_miller_indices: pd.Index = reference_values.index.intersection(values_to_scale.index)
 
     common_reference_values: np.ndarray = reference_values.loc[common_miller_indices].to_numpy()
     common_values_to_scale: np.ndarray = values_to_scale.loc[common_miller_indices].to_numpy()
-
-    # if we are going to weight the scaling using the uncertainty values, then the weights will be
-    # inverse_sigma = 1 / sqrt{ sigmaA ** 2 + sigmaB ** 2 }
-    uncertainty_weights: np.ndarray
-    if (reference_uncertainties is not None) and (to_scale_uncertainties is not None):
-        _check_index_consistency(reference_values, reference_uncertainties)
-        _check_index_consistency(values_to_scale, to_scale_uncertainties)
-        uncertainty_weights = np.sqrt(
-            np.square(reference_uncertainties.loc[common_miller_indices].to_numpy())
-            + np.square(to_scale_uncertainties.loc[common_miller_indices].to_numpy()),
-        )
-    else:
-        uncertainty_weights = np.array(1.0)
+    half_root_two = np.array(np.sqrt(2) / 2.0) # weights are one if no uncertainties provided
+    ref_variance: np.ndarray = (
+        np.square(reference_uncertainties.loc[common_miller_indices].to_numpy())
+        if reference_uncertainties is not None
+        else half_root_two
+    )
+    to_scale_variance: np.ndarray = (
+        to_scale_uncertainties.loc[common_miller_indices].to_numpy()
+        if to_scale_uncertainties is not None
+        else half_root_two
+    )
+    inverse_variance = 1.0 / (ref_variance + to_scale_variance)
 
     def compute_residuals(scaling_parameters: ScaleParameters) -> np.ndarray:
         scale_factors = _compute_anisotropic_scale_factors(

@@ -118,7 +111,7 @@ def compute_residuals(scaling_parameters: ScaleParameters) -> np.ndarray:
         )
 
         difference_after_scaling = scale_factors * common_values_to_scale - common_reference_values
-        residuals = uncertainty_weights * difference_after_scaling
+        residuals = inverse_variance * difference_after_scaling
 
         if not isinstance(residuals, np.ndarray):
             msg = "scipy optimizers' behavior is unstable unless `np.ndarray`s are used"
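The substance of the fix is the weight applied to the least-squares residuals: the removed code multiplied difference_after_scaling by sqrt(sigma_ref**2 + sigma_to_scale**2), a factor that grows with the uncertainty, while the new code multiplies by 1.0 / (ref_variance + to_scale_variance), a factor that shrinks with it. The following minimal, self-contained numpy sketch contrasts the two weightings in isolation; the arrays sigma_ref, sigma_scale, and deltas are illustrative placeholders, not objects from this repository.

import numpy as np

# Illustrative per-reflection uncertainties and amplitude differences (placeholder values).
sigma_ref = np.array([0.1, 0.1, 2.0])
sigma_scale = np.array([0.1, 0.1, 2.0])
deltas = np.array([1.0, -1.0, 1.0])  # scaled-minus-reference differences

# Removed weighting: w = sqrt(sigma_ref**2 + sigma_scale**2), so the noisiest
# reflection contributes the largest residual.
old_weights = np.sqrt(np.square(sigma_ref) + np.square(sigma_scale))

# Inverse-variance weighting, w = 1 / (sigma_ref**2 + sigma_scale**2), the scheme
# the new inverse_variance variable moves toward: noisy reflections are down-weighted.
new_weights = 1.0 / (np.square(sigma_ref) + np.square(sigma_scale))

print(old_weights * deltas)  # the third (noisy) reflection dominates the residuals
print(new_weights * deltas)  # the third (noisy) reflection is suppressed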
test/unit/test_scale.py (15 changes: 0 additions & 15 deletions)

@@ -76,21 +76,6 @@ def test_compute_scale_factors_anisotropic(miller_dataseries: rs.DataSeries) ->
     np.testing.assert_array_almost_equal(scale_factors, miller_dataseries.values)
 
 
-def test_check_index_consistency(miller_dataseries: rs.DataSeries) -> None:
-    scale._check_index_consistency(miller_dataseries, miller_dataseries)
-
-    modified_series = miller_dataseries.copy()
-    modified_series.index = pd.MultiIndex.from_tuples(
-        [
-            (-100, -100, -100),
-        ]
-        * 5,
-        names=["H", "K", "L"],
-    )
-    with pytest.raises(IndexError):
-        scale._check_index_consistency(modified_series, miller_dataseries)
-
-
 @pytest.mark.parametrize("use_uncertainties", [False, True])
 def test_scale_maps(random_difference_map: Map, use_uncertainties: bool) -> None:
     multiple = 2.0
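The commit title flags that the reworked weighting still needs a test, and the only test deleted here covered the removed _check_index_consistency helper. Below is a hypothetical sketch of such a test, not part of this commit. It assumes the scale module is importable as `from meteor import scale`, builds a Miller-indexed DataSeries directly (mirroring the MultiIndex setup in the deleted test), and assumes compute_scale_factors returns per-reflection scale factors, as exercised by test_compute_scale_factors_anisotropic above.

import numpy as np
import pandas as pd
import pytest
import reciprocalspaceship as rs

from meteor import scale  # assumed import path


@pytest.mark.parametrize("use_uncertainties", [False, True])
def test_compute_scale_factors_weighted(use_uncertainties: bool) -> None:
    # five reflections; values_to_scale is exactly twice the reference,
    # so the recovered per-reflection scale factors should all be ~0.5
    miller_index = pd.MultiIndex.from_tuples(
        [(1, 0, 0), (2, 0, 0), (0, 3, 0), (0, 0, 4), (1, 1, 1)],
        names=["H", "K", "L"],
    )
    reference = rs.DataSeries([2.0, 4.0, 6.0, 8.0, 10.0], index=miller_index, name="F_ref")
    to_scale = 2.0 * reference

    uncertainties = (
        rs.DataSeries([0.1] * 5, index=miller_index, name="SIGF") if use_uncertainties else None
    )

    scale_factors = scale.compute_scale_factors(
        reference_values=reference,
        values_to_scale=to_scale,
        reference_uncertainties=uncertainties,
        to_scale_uncertainties=uncertainties,
    )
    np.testing.assert_allclose(scale_factors, 0.5, atol=1e-4)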
