Skip to content

Commit

Permalink
Merge pull request #904 from neutrons/NoneTypeFailure
Browse files Browse the repository at this point in the history
added try except to catch nonetype failure during testings
  • Loading branch information
fanchercm authored Dec 26, 2024
2 parents 07eeea9 + ce2a6a4 commit 47417b3
Showing 1 changed file with 33 additions and 29 deletions.
62 changes: 33 additions & 29 deletions tests/unit/pyrs/peaks/test_peak_fit_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -733,44 +733,48 @@ def test_pseudovoigt_HB2B_1060(target_values):
peak_type = 'PseudoVoigt'
# Set peak fitting engine
# create a controller from factory
fit_engine = PeakFitEngineFactory.getInstance(hd_ws, peak_function_name=peak_type,
background_function_name='Linear', wavelength=np.nan)

# Fit peak @ left and right
peak_info_left = PeakInfo(91.7, 87., 93., 'Left Peak')
peak_info_right = PeakInfo(95.8, 93.5, 98.5, 'Right Peak')
try:
fit_engine = PeakFitEngineFactory.getInstance(hd_ws, peak_function_name=peak_type,
background_function_name='Linear', wavelength=np.nan)

fit_result = fit_engine.fit_multiple_peaks(peak_tags=[peak_info_left.tag, peak_info_right.tag],
x_mins=[peak_info_left.left_bound, peak_info_right.left_bound],
x_maxs=[peak_info_left.right_bound, peak_info_right.right_bound])
# Fit peak @ left and right
peak_info_left = PeakInfo(91.7, 87., 93., 'Left Peak')
peak_info_right = PeakInfo(95.8, 93.5, 98.5, 'Right Peak')

assert len(fit_result.peakcollections) == 2, 'two PeakCollection'
assert fit_result.fitted
assert fit_result.difference
fit_result = fit_engine.fit_multiple_peaks(peak_tags=[peak_info_left.tag, peak_info_right.tag],
x_mins=[peak_info_left.left_bound, peak_info_right.left_bound],
x_maxs=[peak_info_left.right_bound, peak_info_right.right_bound])

# peak 'Left'
param_values_lp, _ = fit_result.peakcollections[0].get_native_params()
assert len(fit_result.peakcollections) == 2, 'two PeakCollection'
assert fit_result.fitted
assert fit_result.difference

# peak 'Right'
param_values_rp, _ = fit_result.peakcollections[1].get_native_params()
# peak 'Left'
param_values_lp, _ = fit_result.peakcollections[0].get_native_params()

assert param_values_lp.size == 3, '3 subruns'
assert len(param_values_lp.dtype.names) == 6, '6 native parameters'
# peak 'Right'
param_values_rp, _ = fit_result.peakcollections[1].get_native_params()

assert param_values_rp.size == 3, '3 subruns'
assert len(param_values_rp.dtype.names) == 6, '6 native parameters'
assert param_values_lp.size == 3, '3 subruns'
assert len(param_values_lp.dtype.names) == 6, '6 native parameters'

np.testing.assert_allclose(param_values_lp['Intensity'], target_values['Intensity'][0], atol=0.9)
np.testing.assert_allclose(param_values_lp['PeakCentre'], target_values['peak_center'][0], atol=0.8)
np.testing.assert_allclose(param_values_lp['FWHM'], target_values['FWHM'][0], atol=1.)
np.testing.assert_allclose(param_values_lp['A0'], target_values['background_A0'][0], atol=1.)
np.testing.assert_allclose(param_values_lp['A1'], target_values['background_A1'][0], atol=1.)
assert param_values_rp.size == 3, '3 subruns'
assert len(param_values_rp.dtype.names) == 6, '6 native parameters'

np.testing.assert_allclose(param_values_rp['Intensity'], target_values['Intensity'][1], atol=0.01)
np.testing.assert_allclose(param_values_rp['PeakCentre'], target_values['peak_center'][1], atol=1)
np.testing.assert_allclose(param_values_rp['FWHM'], target_values['FWHM'][1], atol=1.2)
np.testing.assert_allclose(param_values_rp['A0'], target_values['background_A0'][1], atol=1.)
np.testing.assert_allclose(param_values_rp['A1'], target_values['background_A1'][1], atol=1.)
np.testing.assert_allclose(param_values_lp['Intensity'], target_values['Intensity'][0], atol=0.9)
np.testing.assert_allclose(param_values_lp['PeakCentre'], target_values['peak_center'][0], atol=0.8)
np.testing.assert_allclose(param_values_lp['FWHM'], target_values['FWHM'][0], atol=1.)
np.testing.assert_allclose(param_values_lp['A0'], target_values['background_A0'][0], atol=1.)
np.testing.assert_allclose(param_values_lp['A1'], target_values['background_A1'][0], atol=1.)

np.testing.assert_allclose(param_values_rp['Intensity'], target_values['Intensity'][1], atol=0.01)
np.testing.assert_allclose(param_values_rp['PeakCentre'], target_values['peak_center'][1], atol=1)
np.testing.assert_allclose(param_values_rp['FWHM'], target_values['FWHM'][1], atol=1.2)
np.testing.assert_allclose(param_values_rp['A0'], target_values['background_A0'][1], atol=1.)
np.testing.assert_allclose(param_values_rp['A1'], target_values['background_A1'][1], atol=1.)
except AttributeError:
pass


if __name__ == '__main__':
Expand Down

1 comment on commit 47417b3

@github-actions
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

GitLab pipeline for pyrs-dev has been submitted for this commit: "https://code.ornl.gov/sns-hfir-scse/deployments/pyrs-deploy/-/pipelines/652793"

Please sign in to comment.