Making theory order local per dataset #99

Merged: 16 commits, Nov 29, 2024
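This PR makes the EFT perturbative order a per-dataset setting: the `datasets` entry changes from a list of names to a mapping from dataset name to its options, and the standalone `order` key/argument is dropped from `load_datasets` and from the optimizer, analysis, and projection configs. A minimal sketch of the two conventions, using the test fixture names that appear in the diff below:

```python
# Before this PR: a single global order applied to every dataset.
config_old = {
    "datasets": ["data_test1", "data_test2"],
    "order": "NLO",
}

# After this PR: each dataset carries its own order.
config_new = {
    "datasets": {
        "data_test1": {"order": "LO"},
        "data_test2": {"order": "NLO"},
    },
}
```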
1 change: 0 additions & 1 deletion src/smefit/analyze/pca.py
@@ -48,7 +48,6 @@ def from_dict(cls, config):
config["data_path"],
config["datasets"],
config["coefficients"],
config["order"],
config["use_quad"],
config["use_theory_covmat"],
config["use_t0"],
1 change: 0 additions & 1 deletion src/smefit/chi2.py
@@ -85,7 +85,6 @@ def __init__(self, run_card, n_replica):
run_card["data_path"],
run_card["datasets"],
run_card["coefficients"],
run_card["order"],
run_card["use_quad"],
run_card["use_theory_covmat"],
False,
1 change: 0 additions & 1 deletion src/smefit/fit_manager.py
@@ -117,7 +117,6 @@ def load_datasets(self):
self.config["data_path"],
self.config["datasets"],
self.config["coefficients"],
self.config["order"],
self.config["use_quad"],
self.config["use_theory_covmat"],
False, # t0 is not used here because in the report we look at the experimental chi2
7 changes: 2 additions & 5 deletions src/smefit/loader.py
@@ -485,7 +485,6 @@ def load_datasets(
commondata_path,
datasets,
operators_to_keep,
- order,
use_quad,
use_theory_covmat,
use_t0,
@@ -507,8 +506,6 @@
list of datasets to be loaded
operators_to_keep: list
list of operators for which corrections are loaded
order: "LO", "NLO"
EFT perturbative order
use_quad: bool
if True loads also |HO| corrections
use_theory_covmat: bool
@@ -544,11 +541,11 @@ def load_datasets(
else:
Loader.theory_path = pathlib.Path(commondata_path)

- for sset in np.unique(datasets):
+ for sset in np.unique(list(datasets.keys())):
dataset = Loader(
sset,
operators_to_keep,
- order,
+ datasets[sset]["order"],
use_quad,
use_theory_covmat,
use_multiplicative_prescription,
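The loop in `load_datasets` now iterates over the keys of the `datasets` mapping and hands each dataset its own order. A simplified, self-contained sketch of that lookup (the `Loader` construction itself is omitted):

```python
import numpy as np

# New-style datasets mapping, as in the updated runcards and tests.
datasets = {
    "data_test2": {"order": "NLO"},
    "data_test1": {"order": "LO"},
}

# np.unique sorts the names and drops duplicates, mirroring the updated loop.
for sset in np.unique(list(datasets.keys())):
    order = datasets[sset]["order"]  # per-dataset order replaces the old global argument
    print(sset, order)
```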
1 change: 0 additions & 1 deletion src/smefit/optimize/analytic.py
@@ -82,7 +82,6 @@ def from_dict(cls, config):
config["data_path"],
config["datasets"],
config["coefficients"],
config["order"],
False,
config["use_theory_covmat"],
config["use_t0"],
1 change: 0 additions & 1 deletion src/smefit/optimize/mc.py
@@ -121,7 +121,6 @@ def from_dict(cls, config):
config["data_path"],
config["datasets"],
config["coefficients"],
config["order"],
config["use_quad"],
config["use_theory_covmat"],
config["use_t0"],
1 change: 0 additions & 1 deletion src/smefit/optimize/ultranest.py
@@ -166,7 +166,6 @@ def from_dict(cls, config):
config["data_path"],
config["datasets"],
operators_to_keep,
config["order"],
config["use_quad"],
config["use_theory_covmat"],
config["use_t0"],
1 change: 0 additions & 1 deletion src/smefit/prefit/__init__.py
@@ -19,7 +19,6 @@ def __init__(self, config):
config["data_path"],
config["datasets"],
config["coefficients"],
config["order"],
config["use_quad"],
config["use_theory_covmat"],
config["use_t0"],
5 changes: 0 additions & 5 deletions src/smefit/projections/__init__.py
@@ -22,7 +22,6 @@ def __init__(
dataset_names,
projections_path,
coefficients,
- order,
use_quad,
use_theory_covmat,
rot_to_fit_basis,
@@ -35,7 +34,6 @@ def __init__(
self.dataset_names = dataset_names
self.projections_path = projections_path
self.coefficients = coefficients
- self.order = order
self.use_quad = use_quad
self.use_theory_covmat = use_theory_covmat
self.rot_to_fit_basis = rot_to_fit_basis
@@ -47,7 +45,6 @@ def __init__(
self.commondata_path,
self.dataset_names,
self.coefficients,
- self.order,
self.use_quad,
self.use_theory_covmat,
self.use_t0,
@@ -85,7 +82,6 @@ def from_config(cls, projection_card):
dataset_names = projection_config["datasets"]

coefficients = projection_config.get("coefficients", [])
- order = projection_config.get("order", "LO")
use_quad = projection_config.get("use_quad", False)
use_theory_covmat = projection_config.get("use_theory_covmat", True)
rot_to_fit_basis = projection_config.get("rot_to_fit_basis", None)
@@ -101,7 +97,6 @@
dataset_names,
projections_path,
coefficients,
- order,
use_quad,
use_theory_covmat,
rot_to_fit_basis,
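In `Projection.from_config` the `order = projection_config.get("order", "LO")` fallback is gone: there is no global default any more, so each dataset entry in the projection card is expected to carry its own order, which reaches `load_datasets` through `dataset_names`. A sketch of reading the options under the new convention (a hypothetical parsed card fragment, showing only the keys touched by this change):

```python
# Hypothetical parsed projection card under the new convention.
projection_config = {
    "datasets": {"data_test1": {"order": "LO"}},
    "use_quad": False,
}

dataset_names = projection_config["datasets"]         # mapping, not a list
use_quad = projection_config.get("use_quad", False)   # unchanged default handling
# no projection_config.get("order", "LO") any more: the order is per dataset
orders = {name: opts["order"] for name, opts in dataset_names.items()}
```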
3 changes: 1 addition & 2 deletions tests/fake_results/fake_results.yaml
@@ -10,14 +10,13 @@ data_path: ./tests/fake_data
theory_path: ./tests/fake_data

use_quad: False
- order: NLO
use_theory_covmat: True


# Datasets to include
datasets:

- - data_test5
+ data_test5: {"order": "NLO"}

# Coefficients to fit
coefficients:
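In YAML runcards the same change means the top-level `order` key disappears and each entry under `datasets` becomes a mapping. A quick illustrative check (not part of the test suite) that the new entry parses to the structure the loader expects:

```python
import yaml

runcard = yaml.safe_load(
    """
datasets:
  data_test5: {"order": "NLO"}
"""
)
assert runcard["datasets"]["data_test5"]["order"] == "NLO"
```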
6 changes: 2 additions & 4 deletions tests/test_fisher.py
@@ -71,9 +71,8 @@ def test_fisher():
operators_to_keep = np.array(["Op1", "Op2", "Op3"])
dataset = load_datasets(
commondata_path,
datasets=["data_test5"],
datasets={"data_test5": {"order": "NLO"}},
operators_to_keep=operators_to_keep,
order="NLO",
use_quad=use_quad,
use_theory_covmat=True,
use_t0=False,
@@ -86,9 +85,8 @@
"result_path": None,
"result_ID": None,
"data_path": commondata_path,
"datasets": ["data_test5"],
"datasets": {"data_test5": {"order": "NLO"}},
"coefficients": coefficients_dict,
"order": "NLO",
"use_theory_covmat": True,
"theory_path": commondata_path,
"use_multiplicative_prescription": True,
9 changes: 5 additions & 4 deletions tests/test_loader.py
@@ -72,15 +72,17 @@

def test_load_datasets():
operators_to_keep = np.array(["Op1", "Op2", "Op4"])
datasets = ["data_test1", "data_test2"]

for use_quad in [True, False]:
for order in ["LO", "NLO"]:
+ datasets = {
+     "data_test1": {"order": f"{order}"},
+     "data_test2": {"order": f"{order}"},
+ }
loaded_tuple = load_datasets(
commondata_path,
datasets=datasets,
operators_to_keep=operators_to_keep,
- order=order,
use_quad=use_quad,
use_theory_covmat=True,
use_t0=False,
@@ -152,9 +154,8 @@ def test_operator_correction_sorted():
for order in ["LO", "NLO"]:
loaded_tuple = load_datasets(
commondata_path,
datasets=["data_test1"],
datasets={"data_test1": {"order": f"{order}"}},
operators_to_keep=operators_to_keep,
- order=order,
use_quad=True,
use_theory_covmat=True,
use_t0=False,
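Because the order now lives on each dataset, different datasets can in principle be fitted at different orders in the same run, which the old global flag could not express. A tiny illustration of building such a mixed-order mapping (the dataset names here are hypothetical placeholders):

```python
# Hypothetical mixed-order configuration; names are placeholders, not real datasets.
lo_sets = ["dataset_A", "dataset_B"]
nlo_sets = ["dataset_C"]

datasets = {name: {"order": "LO"} for name in lo_sets}
datasets.update({name: {"order": "NLO"} for name in nlo_sets})

assert datasets["dataset_C"]["order"] == "NLO"
```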
16 changes: 9 additions & 7 deletions tests/test_optimize.py
@@ -170,14 +170,13 @@
path_abs = pathlib.Path(__file__).parent.resolve()


- datasets_no_corr = ["data_test1", "data_test2"]
+ datasets_no_corr = {"data_test1": {"order": "LO"}, "data_test2": {"order": "LO"}}
config_no_corr = {}
config_no_corr["data_path"] = commondata_path
config_no_corr["coefficients"] = coeffs_dict
config_no_corr["result_path"] = commondata_path
config_no_corr["result_ID"] = "test"
config_no_corr["datasets"] = datasets_no_corr
config_no_corr["order"] = "LO"
config_no_corr["use_quad"] = True
config_no_corr["use_theory_covmat"] = True
config_no_corr["use_t0"] = False
@@ -270,14 +269,13 @@
chi2_corr_t0_ext = chi2_corr_t0 + chi2_ext


- datasets_corr = ["data_test3", "data_test4"]
+ datasets_corr = {"data_test3": {"order": "LO"}, "data_test4": {"order": "LO"}}
config_corr = {}
config_corr["data_path"] = commondata_path
config_corr["coefficients"] = coeffs_dict
config_corr["result_path"] = commondata_path
config_corr["result_ID"] = "test"
config_corr["datasets"] = datasets_corr
config_corr["order"] = "LO"
config_corr["use_quad"] = True
config_corr["use_theory_covmat"] = True
config_corr["theory_path"] = commondata_path
@@ -316,7 +314,8 @@ class TestOptimize_NS:
def test_init(self):
assert self.test_opt.results_path == commondata_path / "test"
np.testing.assert_equal(
- self.test_opt.loaded_datasets.ExpNames, datasets_no_corr
+ self.test_opt.loaded_datasets.ExpNames,
+ np.unique(list(datasets_no_corr.keys())),
)
np.testing.assert_equal(
self.test_opt.coefficients.name, ["Op1", "Op2", "Op3", "Op4"]
@@ -388,7 +387,8 @@ class TestOptimize_MC:
def test_init(self):
assert self.test_opt.results_path == commondata_path / "test"
np.testing.assert_equal(
- self.test_opt.loaded_datasets.ExpNames, datasets_no_corr
+ self.test_opt.loaded_datasets.ExpNames,
+ np.unique(list(datasets_no_corr.keys())),
)
np.testing.assert_equal(
self.test_opt.coefficients.name, ["Op1", "Op2", "Op3", "Op4"]
@@ -487,9 +487,11 @@ def test_from_dict(self):
opt.analytic.ALOptimizer.from_dict(config_quad)

def test_init(self):

assert self.test_opt.results_path == commondata_path / "test"
np.testing.assert_equal(
- self.test_opt.loaded_datasets.ExpNames, datasets_no_corr
+ self.test_opt.loaded_datasets.ExpNames,
+ np.unique(list(datasets_no_corr.keys())),
)
np.testing.assert_equal(
self.test_opt.coefficients.name, ["Op1", "Op2", "Op3", "Op4"]
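The tests now compare `ExpNames` against `np.unique(list(datasets.keys()))` rather than the raw `datasets` object, because the updated loader iterates over `np.unique` of the keys, so the loaded names come out sorted and de-duplicated regardless of insertion order. A quick demonstration of that normalization:

```python
import numpy as np

datasets_no_corr = {"data_test2": {"order": "LO"}, "data_test1": {"order": "LO"}}

# Sorted, de-duplicated names, matching how load_datasets walks the mapping.
print(np.unique(list(datasets_no_corr.keys())))  # ['data_test1' 'data_test2']
```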
6 changes: 2 additions & 4 deletions tests/test_pca.py
@@ -17,9 +17,8 @@

dataset = load_datasets(
commondata_path,
datasets=["data_test5"],
datasets={"data_test5": {"order": "NLO"}},
operators_to_keep=operators_to_keep,
order="NLO",
use_quad=True,
use_theory_covmat=True,
use_t0=False,
@@ -176,9 +175,8 @@ def test_pca_id(self):
pca_coeffs = CoefficientManager.from_dict(pca_coeffs_dict)
rotated_datasets = load_datasets(
commondata_path,
datasets=["data_test5"],
datasets={"data_test5": {"order": "NLO"}},
operators_to_keep=["PC00", "PC01", "PC02", "Op3"],
order="NLO",
use_quad=True,
use_theory_covmat=True,
use_t0=False,