Skip to content

emulator

multiprobe_framework.emulator

PrintBestValLossCallback

Bases: Callback

Logs the best (lowest) validation loss so far every log_frequency epochs.

Writes output to stderr.

Source code in src/multiprobe_framework/emulator.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
class PrintBestValLossCallback(tf.keras.callbacks.Callback):
    """
    Keras callback that tracks the running minimum of the validation loss.

    Every `log_frequency` epochs the best (lowest) `val_loss` seen so far is
    printed to stderr.
    """

    def __init__(self, log_frequency=50):
        super().__init__()
        # How often (in epochs) to report the running best.
        self.log_frequency = log_frequency
        # Running minimum; starts at +inf so any real loss replaces it.
        self.best_val_loss = float("inf")

    def on_epoch_end(self, epoch, logs=None):
        # `logs` may be None or may lack "val_loss" (no validation split).
        current = (logs or {}).get("val_loss")
        if current is None:
            return

        # Keep the lowest validation loss observed so far.
        self.best_val_loss = min(self.best_val_loss, current)

        # Report periodically; epochs are zero-based, hence the +1.
        if (epoch + 1) % self.log_frequency == 0:
            print(
                f"Epoch {epoch + 1}: best val_loss so far = {self.best_val_loss}",
                file=sys.stderr,
            )

sample_from_hypercube(lhc, prior_ranges, distributions, cov_file=None, mu_file=None)

Samples from a hypercube using a Latin Hypercube design and a given prior distribution.

The prior distribution can be uniform ("flat") or Gaussian ("gaussian"); the multivariate Gaussian option (via cov_file and mu_file) is accepted in the signature but not currently implemented.

Parameters:

Name Type Description Default
lhc

Latin Hypercube input from scipy.stats

required
prior_ranges

List of tuples containing the prior ranges for each parameter, either the limits for a uniform distribution or the mean and standard deviation for a Gaussian distribution

required
distributions

The corresponding list of prior distribution to sample from (uniform, Gaussian, or multivariate Gaussian)

required
cov_file

The file containing the covariance matrix for the multivariate Gaussian distribution

None
mu_file

The file containing the mean vector for the multivariate Gaussian distribution

None

Returns:

Type Description

The sampled values from the hypercube

Source code in src/multiprobe_framework/emulator.py
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
def sample_from_hypercube(
    lhc, prior_ranges, distributions, cov_file=None, mu_file=None
):
    """
    Samples from a hypercube using a Latin Hypercube design and a given prior
    distribution.

    The prior distribution can be uniform ("flat") or Gaussian ("gaussian").

    :param lhc: Latin Hypercube sample of shape (num_samples, num_params)
        with values in [0, 1], e.g. from scipy.stats
    :param prior_ranges: List of tuples containing the prior ranges for each
        parameter, either the (lower, upper) limits for a flat distribution
        or the (mean, standard deviation) for a Gaussian distribution
    :param distributions: The corresponding list of prior distributions to
        sample from ("flat" or "gaussian")
    :param cov_file: The file containing the covariance matrix for the
        multivariate Gaussian distribution (currently unused; kept for
        interface compatibility)
    :param mu_file: The file containing the mean vector for the multivariate
        Gaussian distribution (currently unused; kept for interface
        compatibility)

    :return: The sampled values from the hypercube, shape
        (num_samples, num_params)
    :raises ValueError: if a distribution other than "flat" or "gaussian"
        is requested
    """
    num_samples, num_params = lhc.shape
    sampled_values = np.zeros((num_samples, num_params))

    # Loop over the parameters and apply the corresponding prior transform.
    for i in range(num_params):
        dist = distributions[i]
        if dist == "flat":
            # Affine map from the unit interval to [lower_bound, upper_bound].
            lower_bound, upper_bound = prior_ranges[i]
            parameter_range = upper_bound - lower_bound
            sampled_values[:, i] = (lhc[:, i] * parameter_range) + lower_bound

        elif dist == "gaussian":
            # Inverse-CDF (probit) transform of the uniform LHC samples.
            mu, sigma = prior_ranges[i]
            sampled_values[:, i] = norm.ppf(lhc[:, i], loc=mu, scale=sigma)

        else:
            # Previously this only logged and silently left the column at
            # zero, producing corrupt samples; fail loudly instead.
            LOGGER.error(
                "Invalid distribution type. Please choose from 'flat' or 'gaussian'."
            )
            raise ValueError(f"Invalid distribution type: {dist!r}")

    return sampled_values

remove_nan_rows(y_train, x_train)

Remove rows from y_train that contain any NaN values and remove the corresponding rows from x_train.

Parameters: - y_train: 2D numpy array of shape (200000, 10185) - x_train: 2D numpy array of shape (200000, 47)

Returns: - Cleaned y_train and x_train with rows containing NaN values removed.

Source code in src/multiprobe_framework/emulator.py
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
def remove_nan_rows(y_train, x_train):
    """
    Drop every row of y_train containing at least one NaN, together with the
    corresponding row of x_train, so the two arrays stay row-aligned.

    Parameters:
    - y_train: 2D numpy array of training targets (e.g. (200000, 10185))
    - x_train: 2D numpy array of training inputs (e.g. (200000, 47))

    Returns:
    - Cleaned y_train and x_train with rows containing NaN values removed.
    """
    # Boolean mask of rows to keep: True where the row is entirely finite.
    keep = ~np.isnan(y_train).any(axis=1)
    n_removed = int(np.count_nonzero(~keep))

    print(
        f"Removed {n_removed} rows with NaN values from the training data."
    )

    return y_train[keep], x_train[keep]

remove_zero_rows(y_train, x_train)

Remove rows from y_train that contain all 0 values and remove the corresponding rows from x_train.

Parameters: - y_train: 2D numpy array of shape (200000, 10185) - x_train: 2D numpy array of shape (200000, 47)

Returns: - Cleaned y_train and x_train with rows containing all 0 values removed.

Source code in src/multiprobe_framework/emulator.py
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
def remove_zero_rows(y_train, x_train):
    """
    Drop every row of y_train that is identically zero, together with the
    corresponding row of x_train, so the two arrays stay row-aligned.

    Parameters:
    - y_train: 2D numpy array of training targets (e.g. (200000, 10185))
    - x_train: 2D numpy array of training inputs (e.g. (200000, 47))

    Returns:
    - Cleaned y_train and x_train with all-zero rows removed.
    """
    # A row is dropped only when *every* entry equals zero.
    all_zero = np.all(y_train == 0, axis=1)
    keep = ~all_zero
    n_removed = int(np.count_nonzero(all_zero))

    print(
        f"Removed {n_removed} rows "
        f"with all 0 values from the training data."
    )

    return y_train[keep], x_train[keep]

extract_g_indices(spectrum_name)

Extract 'g' indices from the spectrum name.

Returns a list of 'g' indices or an empty list if none are found.

Source code in src/multiprobe_framework/emulator.py
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
def extract_g_indices(spectrum_name):
    """
    Extract the tomographic-bin indices associated with 'g' probes from a
    spectrum name such as "gg_12" or "kg_3".

    The prefix before the underscore names the probes; each digit after the
    underscore is the bin index of the corresponding probe character.

    Returns the list of indices for the 'g' probes, or an empty list when
    the name contains no 'g'.
    """
    # Split the name into probe prefix and bin digits (if any).
    if "_" in spectrum_name:
        prefix, _, digits = spectrum_name.partition("_")
        bin_indices = [int(ch) for ch in digits]
    else:
        prefix = spectrum_name
        bin_indices = []

    # How many 'g' probes appear in the prefix.
    num_g = sum(1 for ch in prefix if ch == "g")
    if num_g == 0:
        return []

    # Warn when the name supplies fewer digits than 'g' probes; in that
    # case only the available indices are returned.
    if num_g > len(bin_indices):
        print(f"Warning: Not enough indices for 'g's in spectrum '{spectrum_name}'")

    return bin_indices[:num_g]

prepare_inference(spectra, experiments, emu_path, ufalcon_database, ufalcon_database_2, data_vector_database, indices_wl, indices_lss, indices_lss_desi, setup_dict, planck_plus_act=False, free_amplitudes=False, cmb_lensing_cutoff=1, minimizer=False, is_jax=True)

Prepare the inference for the given spectra and experiments.

Parameters:

Name Type Description Default
spectra

The list of spectra to be used for the inference

required
experiments

The list of experiments to be used for the inference

required
emu_path

The path to the directory storing the emulators

required
ufalcon_database

The path to the ufalcon database

required
data_vector_database

The path to the data vector database

required
indices_wl

The indices for the weak lensing spectra

required
indices_lss

The indices for the large scale structure spectra

required
indices_lss_desi

The indices for the large scale structure spectra for DESI emulator

required

Returns:

Type Description

The list of emulator models and additional arguments for the inference

Source code in src/multiprobe_framework/emulator.py
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
def prepare_inference(
    spectra,
    experiments,
    emu_path,
    ufalcon_database,
    ufalcon_database_2,
    data_vector_database,
    indices_wl,
    indices_lss,
    indices_lss_desi,
    setup_dict,
    planck_plus_act=False,
    free_amplitudes=False,
    cmb_lensing_cutoff=1,
    minimizer=False,
    is_jax=True,
):
    """
    Prepare the inference for the given spectra and experiments.

    :param spectra: The list of spectra to be used for the inference
    :param experiments: The list of experiments to be used for the inference
    :param emu_path: The path to the directory storing the emulators
    :param ufalcon_database: The path to the ufalcon database
    :param ufalcon_database_2: The path to the secondary ufalcon database
        (passed through unchanged in additional_args)
    :param data_vector_database: The path to the data vector database
    :param indices_wl: The indices for the weak lensing spectra
    :param indices_lss: The indices for the large scale structure spectra
    :param indices_lss_desi: The indices for the large scale structure spectra
                             for DESI emulator
    :param setup_dict: Dictionary listing which spectra belong to the "wl",
        "lss_no_wl" and "cmb_lensing_act_cross_desi" groups
    :param planck_plus_act: If True, append an extra CMB model set for the
        planck full-ell plus act combination
    :param free_amplitudes: If True, build the index lists used for the
        amplitude rescalings (mg / k-cross / t-cross) and enable the
        apply_*_rescaling flags in additional_args
    :param cmb_lensing_cutoff: Cutoff value passed through to the inference
        via additional_args (be careful when changing this)
    :param minimizer: If True, add minimizer settings (adam_steps=8000) to
        additional_args
    :param is_jax: If True, load the jax implementation of the emulators

    :return: The list of emulator models and additional arguments for the
             inference
    """

    # Unpack the full emulator set; unavailable emulators come back as empty
    # model objects (see load_emulators).
    (
        tt_emu,
        te_emu,
        ee_emu,
        kk_emu,
        wl_emu,
        lss_no_wl_emu,
        bao_emu,
        sne_emu,
        act_desi_emu,
        bao_dr2_emu,
    ) = load_emulators(emu_path, is_jax)

    # Spectrum-group membership sets used below for set-intersection tests.
    wl_spectra = set(setup_dict["wl"])
    lss_no_wl_spectra = set(setup_dict["lss_no_wl"])
    act_desi_spectra = set(setup_dict["cmb_lensing_act_cross_desi"])

    emu_model_list = []
    LOGGER.info(
        "Preparing inference for spectra: %s and experiments: %s", spectra, experiments
    )

    # --- CMB primary spectra: one [tt, te, ee] model set per likelihood that
    # needs its own evaluation. The append ORDER here must match the order
    # the downstream likelihood consumes the list in.
    if "tt" in spectra:
        emu_model_list.append([tt_emu, te_emu, ee_emu])

        # Add extra models for lollipop pr4 evaluation.
        if "lollipop" in experiments:
            LOGGER.info("Adding lollipop pr4 models")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        # Add extra models for wmap lowl evaluation.
        if "wmap" in experiments:
            LOGGER.info("Adding wmap lowl models")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if "planck_2018_for_act" in experiments:
            LOGGER.info("Adding planck 2018 lowell (wmap-like selection) for act")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if "planck_2018_for_act_dr6" in experiments:
            LOGGER.info("Adding planck 2018 lowell (wmap-like selection) for act")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if planck_plus_act:
            LOGGER.info("Adding planck full ell plus act")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if (
            ("act_dr4" in experiments)
            and ("planck_2018_highell_lowell" in experiments)
            and not planck_plus_act
        ):
            LOGGER.info("act plus planck with lowl ee")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if ("act_dr6" in experiments) and ("hillipop" in experiments):
            LOGGER.info("Planck PR4 + ACT DR6")
            emu_model_list.append([tt_emu, te_emu, ee_emu])

        if "planck_sroll2_lowell_ee" in experiments:
            LOGGER.info(
                "adding sroll2 lowl ee - "
                "make sure this is last in your CMB experiments list!"
            )
            emu_model_list.append([ee_emu])

    # Flags describing which late-time probe combination is active; exactly
    # which ones get set drives the indices_list assembly further down.
    wl_alone = False
    lss_no_wl_alone = False
    wl_and_lss = False
    act_lensing_cross = False
    act_lensing_cross_with_kids = False

    # Some logic to deal with kk case- whether to include by itself or
    # integrated with lss or in 'kaka' in spectra.

    if "kk" in spectra or "kaka" in spectra:
        # NOTE(review): the len(spectra) == 4 / == 1 test looks like it
        # encodes "CMB (+kk) only" vs "kk only" — confirm against callers.
        if len(spectra) == 4 or len(spectra) == 1:
            emu_model_list.append([kk_emu])
            print("yes kk alone")

        else:
            if (
                (set(spectra) & wl_spectra)
                and not (set(spectra) & lss_no_wl_spectra)
                and not (set(spectra) & act_desi_spectra)
            ):
                emu_model_list.append([wl_emu, kk_emu])
                wl_alone = True
                print("yes wl alone")

            if (set(spectra) & lss_no_wl_spectra) and not (set(spectra) & wl_spectra):
                emu_model_list.append([lss_no_wl_emu, kk_emu])
                lss_no_wl_alone = True
                print("yes lss alone")

            if (set(spectra) & lss_no_wl_spectra) and (set(spectra) & wl_spectra):
                emu_model_list.append([wl_emu, lss_no_wl_emu, kk_emu])
                wl_and_lss = True
                print("yes wl and lss")

            if set(spectra) & act_desi_spectra:
                # "gg_11" marks the KiDS-style wl block; its presence selects
                # the combined act_desi + wl + kk model set.
                if "gg_11" not in spectra:
                    emu_model_list.append([act_desi_emu, kk_emu])
                    act_lensing_cross = True
                    print("yes act_desi and kk")
                    act_lensing_cross_with_kids = False
                else:
                    print("kids and act desi")
                    act_lensing_cross = True
                    act_lensing_cross_with_kids = True
                    emu_model_list.append([act_desi_emu, wl_emu, kk_emu])

    else:
        # Same probe-combination logic, without the kk emulator.
        if (set(spectra) & wl_spectra) and not (set(spectra) & lss_no_wl_spectra):
            emu_model_list.append([wl_emu])
            wl_alone = True

        if (set(spectra) & lss_no_wl_spectra) and not (set(spectra) & wl_spectra):
            emu_model_list.append([lss_no_wl_emu])
            lss_no_wl_alone = True

        if (set(spectra) & lss_no_wl_spectra) and (set(spectra) & wl_spectra):
            emu_model_list.append([wl_emu, lss_no_wl_emu])
            wl_and_lss = True

        if set(spectra) & act_desi_spectra:
            emu_model_list.append([act_desi_emu])
            act_lensing_cross = True
            print("yes act_desi alone")

    # if DESI lrgs in spectra need to add the indices for the shot noise marginalization
    if (
        "dd_11" in spectra
        or "dd_22" in spectra
        or "dd_33" in spectra
        or "dd_44" in spectra
    ):
        # Four blocks of five indices, one per DESI LRG auto-spectrum.
        indices_shot = [
            [np.arange(5), np.arange(5, 10), np.arange(10, 15), np.arange(15, 20)]
        ]
    else:
        indices_shot = [[], [], [], [], [], [], [], []]  # No shot noise marginalization

    if act_lensing_cross_with_kids:
        # Shot-noise indices apply only to the DESI block; the wl block
        # contributes an empty entry.
        indices_shot = [
            [np.arange(5), np.arange(5, 10), np.arange(10, 15), np.arange(15, 20)],
            [],
        ]

    indices_list_lss = []
    indices_list_wl = []
    indices_list_desi = []

    # Initialize lists for each mg_i (1 to 5)
    if free_amplitudes:
        indices_mg_linear_wl = [[] for _ in range(5)]  # For mg_1 to mg_5 linear terms
        indices_mg_squared_wl = [[] for _ in range(5)]  # For mg_1 to mg_5 squared terms
        indices_mg_linear_lss = [[] for _ in range(5)]
        indices_list_k_cross = []
        indices_list_t_cross = []
    else:
        indices_list_mg = None
        indices_list_k_cross = None
        indices_list_t_cross = None

    if act_lensing_cross:
        indices_list_ka_cross = []
    else:
        indices_list_ka_cross = None

    # --- Per-spectrum index assembly: translate each spectrum name into the
    # flat data-vector index ranges of the emulator it belongs to.
    for spec in spectra:
        if spec not in ["tt", "te", "ee", "kk", "kaka"]:
            if spec.startswith("g"):
                indices_list_wl.extend(
                    np.arange(
                        indices_wl[f"{spec}_low"], indices_wl[f"{spec}_high"]
                    ).tolist()
                )
            elif set(spectra) & act_desi_spectra:
                indices_list_desi.extend(
                    np.arange(
                        indices_lss_desi[f"{spec}_low"],
                        indices_lss_desi[f"{spec}_high"],
                    ).tolist()
                )
            elif set(spectra) & lss_no_wl_spectra:
                indices_list_lss.extend(
                    np.arange(
                        indices_lss[f"{spec}_low"], indices_lss[f"{spec}_high"]
                    ).tolist()
                )
            else:
                raise ValueError(
                    f"Spectrum {spec} not found in any of the emulators - "
                    "check the spectra and experiments"
                )

        if free_amplitudes:
            if spec not in ["tt", "te", "ee", "kk"]:
                # CMB-lensing cross terms ("k..." but not "ka...") use the
                # lss index dictionary.
                if spec.startswith("k") and not spec.startswith("ka"):
                    indices_list_k_cross.extend(
                        np.arange(
                            indices_lss[f"{spec}_low"], indices_lss[f"{spec}_high"]
                        ).tolist()
                    )

                # Temperature cross terms prefer the DESI index dictionary
                # and fall back to the lss one when the DESI entry is None.
                if "t" in spec:
                    if indices_lss_desi[f"{spec}_low"] is not None:
                        indices_list_t_cross.extend(
                            np.arange(
                                indices_lss_desi[f"{spec}_low"],
                                indices_lss_desi[f"{spec}_high"],
                            ).tolist()
                        )
                    else:
                        indices_list_t_cross.extend(
                            np.arange(
                                indices_lss[f"{spec}_low"], indices_lss[f"{spec}_high"]
                            ).tolist()
                        )
                # Decide which emulator indices to use

                # NOTE(review): the lss branch sets indices_mg_linear but not
                # indices_mg_squared — if an lss spec with a double 'g' is hit
                # before any wl spec, the count == 2 path below would raise
                # NameError. Confirm this combination cannot occur.
                if f"{spec}_low" in indices_wl:
                    indices_dict = indices_wl
                    indices_mg_linear = indices_mg_linear_wl
                    indices_mg_squared = indices_mg_squared_wl
                elif f"{spec}_low" in indices_lss:
                    indices_dict = indices_lss
                    indices_mg_linear = indices_mg_linear_lss
                else:
                    # If the spectrum is not found, skip it or handle the error
                    print(f"Spectrum {spec} not found in emulator indices.")
                    continue

                # Get the indices range for this spectrum
                indices_range = get_indices_range(spec, indices_dict)
                if not indices_range:
                    continue  # Skip if indices not found

                # Extract 'g' indices
                g_indices = extract_g_indices(spec)
                # Count occurrences of each 'g' index
                g_counter = Counter(g_indices)

                # Add indices to the appropriate lists
                for g_idx, count in g_counter.items():
                    if 1 <= g_idx <= 5:
                        idx = g_idx - 1  # Adjust for zero-based indexing
                        # count == 1: spectrum is linear in that bin's
                        # amplitude; count == 2: quadratic (auto-spectrum).
                        if count == 1:
                            indices_mg_linear[idx].extend(indices_range)
                        elif count == 2:
                            indices_mg_squared[idx].extend(indices_range)
                        else:
                            print(
                                f"Unexpected count {count} "
                                f"for g index {g_idx} in {spec}"
                            )
                    else:
                        print(f"g index {g_idx} out of range in {spec}")

        if act_lensing_cross:
            # ACT-lensing cross spectra ("ka...") excluding the kaka auto.
            if "ka" in spec:
                if spec not in ["kaka"]:
                    indices_list_ka_cross.extend(
                        np.arange(
                            indices_lss_desi[f"{spec}_low"],
                            indices_lss_desi[f"{spec}_high"],
                        ).tolist()
                    )

            print("indices list ka cross", indices_list_ka_cross)

    # Wrap ka-cross indices per emulator block (with an empty entry for the
    # wl block in the kids combination).
    if act_lensing_cross:
        indices_list_ka_cross = [indices_list_ka_cross]
    if act_lensing_cross_with_kids:
        indices_list_ka_cross = [indices_list_ka_cross, []]

    print("indices ka_cross", indices_list_ka_cross)

    # Assemble the per-emulator index lists for whichever probe combination
    # was flagged above. Note: indices_list_mg is reset here regardless of
    # the free_amplitudes branch earlier.
    indices_list_mg = None
    indices_list = None
    if wl_alone:
        indices_list = [indices_list_wl]
        if free_amplitudes:
            print("indices list mg linear", indices_mg_linear_wl)
            print("indices list mg squared", indices_mg_squared_wl)
            indices_list_mg = [[indices_mg_linear_wl, indices_mg_squared_wl]]
            indices_list_k_cross = [[]]
            indices_list_t_cross = [[]]

    if lss_no_wl_alone:
        indices_list = [indices_list_lss]
        if free_amplitudes:
            indices_list_mg = [[indices_mg_linear_lss, [[] for _ in range(5)]]]
            indices_list_k_cross = [indices_list_k_cross]
            indices_list_t_cross = [indices_list_t_cross]

    if wl_and_lss:
        indices_list = [indices_list_wl, indices_list_lss]
        if free_amplitudes:
            indices_list_mg = [
                [indices_mg_linear_wl, indices_mg_squared_wl],
                [indices_mg_linear_lss, [[] for _ in range(5)]],
            ]
            indices_list_k_cross = [[], indices_list_k_cross]
            indices_list_t_cross = [[], indices_list_t_cross]
            print("indices_list_mg", indices_list_mg)
            print("indices_list_k_cross", indices_list_k_cross)
            print("indices_list_t_cross", indices_list_t_cross)

    # The act-lensing branches deliberately override the lists above.
    if act_lensing_cross:
        indices_list = [indices_list_desi]
        print("setting indices list k cross to none for act lensing cross")
        indices_list_k_cross = None
        indices_list_t_cross = [indices_list_t_cross]

    if act_lensing_cross_with_kids:
        indices_list = [indices_list_desi, indices_list_wl]
        indices_list_k_cross = None
        indices_list_t_cross = [indices_list_t_cross]

    # All amplitude-rescaling switches are driven by the single
    # free_amplitudes flag.
    if free_amplitudes:
        apply_act_temperature_rescaling = True
        apply_hillipop_pol_rescaling = True
        apply_hillipop_temperature_rescaling = True
        apply_kappa_rescaling = True
        apply_planck_pol_rescaling = True
        apply_planck_temperature_rescaling = True
        apply_wmap_temperature_rescaling = True
        apply_wmap_polarization_rescaling = True

    else:
        apply_act_temperature_rescaling = False
        apply_hillipop_pol_rescaling = False
        apply_hillipop_temperature_rescaling = False
        apply_kappa_rescaling = False
        apply_planck_pol_rescaling = False
        apply_planck_temperature_rescaling = False
        apply_wmap_temperature_rescaling = False
        apply_wmap_polarization_rescaling = False

    # finally if you want to include BAO data add this emulator in at the last
    # position of the list

    if "desi_bao_2024" in experiments:
        emu_model_list.append([bao_emu])

    if "desi_bao_dr2" in experiments:
        emu_model_list.append([bao_dr2_emu])

    if "pantheon_plus" in experiments:
        # assert that this is the last member of the list
        try:
            assert experiments[-1] == "pantheon_plus"
        except AssertionError:
            LOGGER.error("pantheon_plus must be the last experiment in the list")
            raise
        emu_model_list.append([sne_emu])

    print("emulator list", emu_model_list)

    # Keyword arguments forwarded to the downstream inference machinery.
    additional_args = {
        "ufalcon_database": ufalcon_database,
        "ufalcon_database_2": ufalcon_database_2,
        "data_vector_database": data_vector_database,
        "indices_list": indices_list,
        "bump_vector_list": None,
        "spectra": [spec for spec in spectra if spec not in ["tt", "te", "ee"]],
        "add_lowell": True,  # planck lowl added
        "add_lowell_tt": True,  # hillipop lowl added
        "lens_exp": False,
        "integrated_model": True,
        "add_kk_official": True,
        "cut_cmblensing_indices": True,
        "cmb_lensing_cutoff": cmb_lensing_cutoff,  # NEW ADDITION- be careful with this
        "indices_list_mg": indices_list_mg,
        "apply_act_temperature_rescaling": apply_act_temperature_rescaling,
        "apply_planck_pol_rescaling": apply_planck_pol_rescaling,
        "apply_hillipop_pol_rescaling": apply_hillipop_pol_rescaling,
        "apply_hillipop_temperature_rescaling": apply_hillipop_temperature_rescaling,
        "apply_kappa_rescaling": apply_kappa_rescaling,
        "apply_planck_temperature_rescaling": apply_planck_temperature_rescaling,
        "apply_wmap_temperature_rescaling": apply_wmap_temperature_rescaling,
        "apply_wmap_polarization_rescaling": apply_wmap_polarization_rescaling,
        "indices_list_k_cross": indices_list_k_cross,
        "indices_list_t_cross": indices_list_t_cross,
        "mc_norm_file": os.path.join(
            os.path.dirname(__file__),
            "../combined_probes_data/survey_data/mc_norm_mean_dict_ka_desi.pkl",
        ),
        "indices_shot": indices_shot,
        "indices_list_ka_cross": indices_list_ka_cross,
    }

    # Without primary CMB spectra the CMB contribution is marginalized.
    if "tt" not in spectra:
        additional_args["cmb_marginalized"] = True

    if minimizer:
        additional_args["adam_steps"] = 8000

    return emu_model_list, additional_args

load_emulators(emu_path, is_jax=False)

Load the emulators for the weak lensing and large scale structure spectra.

Parameters:

Name Type Description Default
emu_path

The path to the directory storing the emulators

required

Returns:

Type Description

The emulators, if they are not found then an empty emulator object is returned

Source code in src/multiprobe_framework/emulator.py
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
def load_emulators(emu_path, is_jax=False):
    """
    Load the emulators for the weak lensing and large scale structure spectra.

    Each emulator is restored from ``<emu_path>/<label>_emu``. If restoring
    fails, a warning is logged and the empty (unrestored) emulator object is
    returned in its place so callers can still unpack the full tuple.

    :param emu_path: The path to the directory storing the emulators
    :param is_jax: If True, use the jax implementation of the integrated
        model

    :return: The emulators as a tuple in the order (tt, te, ee, kk, wl,
             lss_no_wl, bao, sne, act_desi, bao_dr2); if an emulator is not
             found then an empty emulator object is returned
    """
    if is_jax:
        from UPanda import integrated_model_jax as integrated_model
    else:
        from UPanda import integrated_model
    LOGGER.info("Loading emulators")

    def _restore(label):
        # Restore a single emulator; on any failure, warn and return the
        # empty model object (same best-effort behavior for every emulator).
        emu = integrated_model.IntegratedModel(None, None, None, None)
        try:
            emu.restore(os.path.join(emu_path, f"{label}_emu"))
        except Exception as e:
            LOGGER.warning(
                "No %s emulator found or loading issue (see below)", label
            )
            LOGGER.warning(e)
        return emu

    # Order matters: callers unpack this tuple positionally.
    labels = (
        "tt",
        "te",
        "ee",
        "kk",
        "wl",
        "lss_no_wl",
        "bao",
        "sne",
        "act_desi",
        "bao_dr2",
    )
    return tuple(_restore(label) for label in labels)