Skip to content

covariance

multiprobe_framework.covariance

create_nmt_fields(specind_1, specind_2, mask_1, mask_2, beam, w_beam_I=None, lmax=-1, maps=[None, None])

A function to create the NaMaster fields for a given pair of spectra. This function will create the fields for the given pair of spectra and return them to be used in the covariance calculation.

Parameters:

Name Type Description Default
specind_1

the first spectrum index to be used in the covariance matrix

required
specind_2

the second spectrum index to be used in the covariance matrix

required
mask_1

the mask for the first spectrum index

required
mask_2

the mask for the second spectrum index

required
beam

the beam for the spectra (usually just the pixel window function)

required
w_beam_I

the beam for the Temperature field if specified

None
lmax

maximum multipole for the NMT field object

-1

Returns:

Type Description

f1, f2: the NaMaster fields for the given pair of spectra

Source code in src/multiprobe_framework/covariance.py
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
def _temperature_beam(pixel_beam, w_beam_I, lmax):
    """
    Combine the pixel window function with the Temperature beam.

    The Temperature beam is zero-padded (or truncated) to the length of the
    pixel window so the two can be multiplied element-wise.

    :param pixel_beam: the pixel window function (1d array)
    :param w_beam_I: the Temperature beam (1d array)
    :param lmax: if positive, truncate the combined beam to lmax + 1 entries
    :return: the combined beam array
    """
    pixel_beam_len = len(pixel_beam)
    w_beam_len = w_beam_I.shape[0]

    if w_beam_len < pixel_beam_len:
        # pad the Temperature beam with zeros to the pixel-window length
        w_beam_I = np.pad(w_beam_I, (0, pixel_beam_len - w_beam_len), "constant")
        combined = pixel_beam * w_beam_I
    else:
        combined = pixel_beam * w_beam_I[:pixel_beam_len]

    # ensure the beam is the correct size
    if lmax > 0:
        combined = combined[: lmax + 1]
    return combined


def create_nmt_fields(
    specind_1,
    specind_2,
    mask_1,
    mask_2,
    beam,
    w_beam_I=None,
    lmax=-1,
    maps=None,
):
    """
    A function to create the NaMaster fields for a given pair of spectra. This
    function will create the fields for the given pair of spectra and return
    them to be used in the covariance calculation.

    :param specind_1: the first spectrum index to be used in the covariance matrix
    :param specind_2: the second spectrum index to be used in the covariance matrix
    :param mask_1: the mask for the first spectrum index
    :param mask_2: the mask for the second spectrum index
    :param beam: the beam for the spectra (usually just the pixel window function)
    :param w_beam_I: the beam for the Temperature field if specified
    :param lmax: maximum multipole for the NMT field object
    :param maps: optional pair [map_1, map_2]; a None entry builds a mask-only field

    :return: f1, f2: the NaMaster fields for the given pair of spectra
    """

    # avoid a shared mutable default argument
    if maps is None:
        maps = [None, None]

    utils.check_specind_exists(specind_1)
    utils.check_specind_exists(specind_2)

    LOGGER.info(f"Beam for the fields: {beam}")

    if lmax > 0:
        # NOTE: this truncates the caller's beam list in place
        beam[0] = beam[0][: lmax + 1]
        beam[2] = beam[2][: lmax + 1]

    # "g1".."g5" are spin-2 shear fields; everything else is spin-0
    shear_inds = ["g1", "g2", "g3", "g4", "g5"]

    if specind_1 not in shear_inds and specind_2 not in shear_inds:
        # spin-0 x spin-0 case
        if specind_1 == "t" and maps[0] is None:
            beam_1 = _temperature_beam(beam[0], w_beam_I, lmax)
            f1 = nmt.NmtField(mask_1, maps=maps[0], spin=0, beam=beam_1, lmax=lmax)
        else:
            f1 = nmt.NmtField(mask_1, maps=maps[0], spin=0, beam=beam[0], lmax=lmax)

        # bugfix: guard on maps[1] (the second field's map), not maps[0]
        if specind_2 == "t" and maps[1] is None:
            beam_2 = _temperature_beam(beam[0], w_beam_I, lmax)
            f2 = nmt.NmtField(mask_2, maps=maps[1], spin=0, beam=beam_2, lmax=lmax)
        else:
            f2 = nmt.NmtField(mask_2, maps=maps[1], spin=0, beam=beam[0], lmax=lmax)

    elif specind_1 in shear_inds and specind_2 in shear_inds:
        # spin-2 x spin-2 case
        f1 = nmt.NmtField(mask_1, maps=maps[0], spin=2, beam=beam[2], lmax=lmax)
        f2 = nmt.NmtField(mask_2, maps=maps[1], spin=2, beam=beam[2], lmax=lmax)

    elif specind_2 in shear_inds:  # finally the mixed spin-0 x spin-2 case
        if specind_1 == "t" and maps[0] is None:
            # _temperature_beam also handles the case where w_beam_I is already
            # long enough (the previous code padded unconditionally here)
            beam_1 = _temperature_beam(beam[0], w_beam_I, lmax)
            f1 = nmt.NmtField(mask_1, maps=maps[0], spin=0, beam=beam_1, lmax=lmax)
        else:
            f1 = nmt.NmtField(mask_1, maps=maps[0], spin=0, beam=beam[0], lmax=lmax)

        f2 = nmt.NmtField(mask_2, maps=maps[1], spin=2, beam=beam[2], lmax=lmax)

    else:
        raise ValueError("Invalid spectral index")

    return f1, f2

return_bpws_covweight(ells, ellmin, ellmax, n_bin, method='log', bin_edges=None)

A function to return a set of NaMaster bandpowers with logarithmic spacing for a given range of ells. Again care required for not having too many low-ell bins! Here we additionally weight by 2l+1 to provide a cosmic variance weighted estimate

Parameters:

Name Type Description Default
ells

the ells to be binned (should start at lmin and go to lmax)

required
n_bin

the number of bins desired

required
linear

do you actually want linear binning instead of default log bins? NB: This uses an old scheme which needs updating: be careful about which ells exactly are being included in these bins! Note the current function is set up such that the ellmin is included in the first bin but ellmax is outside of the last bin!

required

Returns:

Type Description

A set of bandpowers for use in PyMaster and associated weights

Source code in src/multiprobe_framework/covariance.py
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
def return_bpws_covweight(ells, ellmin, ellmax, n_bin, method="log", bin_edges=None):
    """
    A function to return a set of NaMaster bandpowers with logarithmic
    (default), linear, or user-defined spacing for a given range of ells.
    Again care required for not having too many low-ell bins! Here we
    additionally weight by 2l+1 to provide a cosmic variance weighted estimate.

    Note the current function is set up such that ellmin is included in the
    first bin but ellmax is outside of the last bin!

    :param ells: the ells to be binned (should start at ellmin and go to ellmax)
    :param ellmin: the minimum ell (included in the first bin)
    :param ellmax: the maximum ell (excluded from the last bin)
    :param n_bin: the number of bins desired
    :param method: "log" (default) or "linear" bin spacing
    :param bin_edges: optional user-supplied bin edges; overrides method

    :return: A set of bandpowers for use in PyMaster and associated weights
    """

    if bin_edges is not None:
        LOGGER.info("using user-supplied bin_edges!")
        edges = bin_edges
    elif method == "log":
        # Logarithmic bins; the -0.001 offset ensures that the first ell
        # (ellmin) falls inside the first bin despite rounding.
        edges = np.geomspace(ellmin - 0.001, ellmax, n_bin + 1, endpoint=True)
    elif method == "linear":
        edges = np.linspace(ellmin - 0.001, ellmax, num=n_bin + 1)
    else:
        raise ValueError("method must be either log or linear")

    # -1 marks ells that fall outside every bin
    bpws = np.full(len(ells), fill_value=-1, dtype=int)
    # weight by the number of independent m-modes per ell
    weights = 2 * ells + 1

    for i in range(n_bin):
        in_bin = np.logical_and(edges[i] <= ells, ells < edges[i + 1])
        bpws[in_bin] = i

    return bpws, weights

compute_covariance_term(spec, specind_1, specind_2, theta_dict, lmax_noise, lmax_cw, cw, w, setup_dict, pymaster_workspace_path, noise_dict, path_desi=None)

A function to compute the covariance term for a given pair of spectra.

This is a wrapper around the NaMaster gaussian_covariance function which computes the covariance matrix for a given set of bandpowers.

Parameters:

Name Type Description Default
specind_1

the first spectrum to be used in the covariance matrix

required
specind_2

the second spectrum to be used in the covariance matrix

required
theta_nmt

the cosmological parameters to be used in the computation

required
lmax_noise

the maximum ell to be used in the noise power spectrum

required
lmax_cw

the maximum ell to be used in the coupling coefficients

required
cw

the coupling coefficients workspace

required
w

the workspace for the mask

required
setup_dict

the dictionary containing the setup information for the spectra

required
pymaster_workspace_path

the path to the PyMaster workspace

required
noise_dict

the dictionary containing the noise power spectra

required
Source code in src/multiprobe_framework/covariance.py
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
def compute_covariance_term(
    spec,
    specind_1,
    specind_2,
    theta_dict,
    lmax_noise,
    lmax_cw,
    cw,
    w,
    setup_dict,
    pymaster_workspace_path,
    noise_dict,
    path_desi=None,
):
    """
    A function to compute the covariance term for a given pair of spectra.

    This is a wrapper around the NaMaster gaussian_covariance function which
    computes the covariance matrix for a given set of bandpowers.

    :param spec: the label of the spectrum whose covariance block is computed
    :param specind_1: the first spectrum to be used in the covariance matrix
    :param specind_2: the second spectrum to be used in the covariance matrix
    :param theta_dict: the cosmological parameters to be used in the computation
    :param lmax_noise: the maximum ell to be used in the noise power spectrum
    :param lmax_cw: the maximum ell to be used in the coupling coefficients
    :param cw: the coupling coefficients workspace
    :param w: the workspace for the mask
    :param setup_dict: the dictionary containing the setup information for the spectra
    :param pymaster_workspace_path: the path to the PyMaster workspace
    :param noise_dict: the dictionary containing the noise power spectra
    :param path_desi: optional path forwarded to the theory code (default None)
    :return: the Gaussian covariance block for the given pair of spectra
    """

    # get the spin of each field: "g1".."g5" are spin-2 shear fields,
    # everything else is treated as spin-0
    if specind_1 not in ["g1", "g2", "g3", "g4", "g5"] and specind_2 not in [
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
    ]:
        spin = [0, 0]
    elif specind_1 in ["g1", "g2", "g3", "g4", "g5"] and specind_2 in [
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
    ]:
        spin = [2, 2]
    else:
        spin = [0, 2]

    # extract the tomographic bin digits from the spectrum label (e.g. "gg_12")
    numb = re.findall(r"\d+", spec)

    if specind_1 == specind_2:  # Auto correlation
        if specind_1[0] != "g":
            spectra_cov = [f"{spec}"]

        else:
            # shear auto-correlation: build the label from the bin digit
            spectra_cov = [f"gg_{numb[0][0]}{numb[0][0]}"]

        classy_theory_nmt_dict, cl_full_dict = theory_code.get_theory_cls(
            theta_dict,
            spectra=spectra_cov,
            path2wsp=pymaster_workspace_path,
            path_desi=path_desi,
            # ellmax=lmax_noise,
        )

        # Add the correct noise terms
        cl_00 = (
            cl_full_dict[f"{spec}"][0:lmax_noise] + noise_dict[specind_1][0:lmax_noise]
        )

        if spin[0] == 0:
            # 0x0 auto correlation case: all four fields share the same Cl
            covar_term = nmt.gaussian_covariance(
                cw,
                spin[0],
                spin[0],
                spin[0],
                spin[0],
                [cl_00[0 : lmax_cw + 1]],  # TT
                [cl_00[0 : lmax_cw + 1]],
                [cl_00[0 : lmax_cw + 1]],
                [cl_00[0 : lmax_cw + 1]],
                w,
                wb=w,
            )

        else:
            # 2x2 auto correlation case: EB and BB spectra are assumed zero
            cl_bb = 0 * cl_00
            cl_eb = 0 * cl_00
            covar_term = nmt.gaussian_covariance(
                cw,
                spin[0],
                spin[0],
                spin[0],
                spin[0],  # Spins of the 4 fields
                [
                    cl_00[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_bb[0 : lmax_cw + 1],
                ],  # TT
                [
                    cl_00[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_bb[0 : lmax_cw + 1],
                ],
                [
                    cl_00[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_bb[0 : lmax_cw + 1],
                ],
                [
                    cl_00[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_eb[0 : lmax_cw + 1],
                    cl_bb[0 : lmax_cw + 1],
                ],
                w,
                wb=w,
            ).reshape([setup_dict[f"{spec}_n_bin"], 4, setup_dict[f"{spec}_n_bin"], 4])
            # NaMaster returns all 4 spin-2 spectrum combinations; keep the
            # EE x EE part of the covariance matrix
            covar_term = covar_term[
                :, 0, :, 0
            ]  # select the correct part of the covariance matrix
    else:  # cross-correlation so we need more terms
        spectra_cov = [spec]
        # Now add the required auto_correlations

        if specind_1[0] not in ["g", "d"]:
            spectra_cov += [f"{specind_1}{specind_1}"]
        else:
            spectra_cov += [f"{specind_1[0]}{specind_1[0]}_{numb[0][0]}{numb[0][0]}"]

        if specind_2[0] not in ["g", "d"]:
            spectra_cov += [f"{specind_2}{specind_2}"]

        else:
            if specind_1[0] not in ["g", "d"]:
                spectra_cov += [
                    f"{specind_2[0]}{specind_2[0]}_{numb[0][0]}{numb[0][0]}"
                ]

            else:  # gg-gg xcorr: second bin digit identifies the second field
                spectra_cov += [
                    f"{specind_1[0]}{specind_1[0]}_{numb[0][1]}{numb[0][1]}"
                ]

        # Firstly compute a theoretical Cl
        classy_theory_nmt_dict, cl_full_dict = theory_code.get_theory_cls(
            theta_dict,
            spectra=spectra_cov,
            path2wsp=pymaster_workspace_path,
            path_desi=path_desi,
        )

        # cl_dict[0]: cross spectrum; cl_dict[1]/[2]: the two auto spectra
        # with their noise terms added
        cl_dict = {}
        for i, spec_cov in enumerate(spectra_cov):
            cl_spec = cl_full_dict[f"{spec_cov}"][0:lmax_noise]

            if spec_cov == "tt":
                # rescale the TT spectrum by 1e-12 — presumably a
                # muK^2 <-> K^2 unit conversion; TODO confirm
                cl_spec = cl_spec * 1e-12

            if i == 0:
                cl_dict[i] = cl_spec[0:lmax_noise]

            elif i == 1:
                cl_dict[i] = cl_spec[0:lmax_noise] + noise_dict[specind_1][0:lmax_noise]

            elif i == 2:
                cl_dict[i] = cl_spec[0:lmax_noise] + noise_dict[specind_2][0:lmax_noise]

        if spin[1] == 0:
            # 0X0 case
            covar_term = nmt.gaussian_covariance(
                cw,
                spin[0],
                spin[0],
                spin[1],
                spin[1],  # Spins of the 4 fields
                [cl_dict[1][0 : lmax_cw + 1]],
                [cl_dict[0][0 : lmax_cw + 1]],
                [cl_dict[0][0 : lmax_cw + 1]],
                [cl_dict[2][0 : lmax_cw + 1]],
                w,
                wb=w,
            )

        else:
            if spin[0] == spin[1]:
                # 2X2 case: EB and BB spectra are assumed zero
                cl_eb = 0 * cl_dict[0]
                cl_bb = 0 * cl_dict[0]
                covar_term = nmt.gaussian_covariance(
                    cw,
                    spin[0],
                    spin[1],
                    spin[0],
                    spin[1],  # Spins of the 4 fields
                    [
                        cl_dict[1][0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_bb[0 : lmax_cw + 1],
                    ],
                    [
                        cl_dict[0][0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_bb[0 : lmax_cw + 1],
                    ],
                    [
                        cl_dict[0][0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_bb[0 : lmax_cw + 1],
                    ],
                    [
                        cl_dict[2][0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_bb[0 : lmax_cw + 1],
                    ],
                    w,
                    wb=w,
                ).reshape(
                    [setup_dict[f"{spec}_n_bin"], 4, setup_dict[f"{spec}_n_bin"], 4]
                )

                covar_term = covar_term[
                    :, 0, :, 0
                ]  # select the correct part of the covariance matrix

            else:
                # 0X2 case: TB, EB and BB spectra are assumed zero
                cl_tb = 0 * cl_dict[0]
                cl_eb = 0 * cl_dict[0]
                cl_bb = 0 * cl_dict[0]
                covar_term = nmt.gaussian_covariance(
                    cw,
                    spin[0],
                    spin[1],
                    spin[0],
                    spin[1],  # Spins of the 4 fields
                    [cl_dict[1][0 : lmax_cw + 1]],
                    [cl_dict[0][0 : lmax_cw + 1], cl_tb[0 : lmax_cw + 1]],
                    [cl_dict[0][0 : lmax_cw + 1], cl_tb[0 : lmax_cw + 1]],
                    [
                        cl_dict[2][0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_eb[0 : lmax_cw + 1],
                        cl_bb[0 : lmax_cw + 1],
                    ],
                    w,
                    wb=w,
                ).reshape(
                    [setup_dict[f"{spec}_n_bin"], 2, setup_dict[f"{spec}_n_bin"], 2]
                )

                covar_term = covar_term[
                    :, 0, :, 0
                ]  # select the correct part of the covariance matrix

    return covar_term

kappa_to_gamma(kappa_map, lmax=None)

Transform a kappa map to a gamma map.

This is done following the Kaiser-Squires procedure nicely explained in https://arxiv.org/abs/1703.09233.

Parameters:

Name Type Description Default
kappa_map

the input kappa map

required
lmax

the maximum ell to be used in the transformation

None

Returns:

Type Description

the gamma maps

Source code in src/multiprobe_framework/covariance.py
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
def kappa_to_gamma(kappa_map, lmax=None):
    """
    Transform a kappa map to a gamma map.

    This is done following the Kaiser-Squires procedure nicely explained in
    https://arxiv.org/abs/1703.09233.

    :param kappa_map: the input kappa map
    :param lmax: the maximum ell to be used in the transformation
        (defaults to 3 * nside - 1)

    :return: the gamma maps
    """

    nside = hp.npix2nside(kappa_map.size)

    if lmax is None:
        lmax = 3 * nside - 1

    # Use the same lmax for the transform and for the ell array below,
    # otherwise the factor applied to kappa_alm has a mismatched shape
    # whenever a caller passes lmax != 3 * nside - 1.
    kappa_alm = hp.map2alm(kappa_map, lmax=lmax)
    ell = hp.Alm.getlm(lmax)[0]

    # Kaiser-Squires factor; the monopole and dipole (ell = 0, 1) are zeroed
    fac = np.where(
        np.logical_and(ell != 1, ell != 0),
        np.sqrt(((ell + 2.0) * (ell - 1)) / ((ell + 1) * ell)),
        0,
    )
    kappa_alm *= fac
    # Treat kappa_alm as an E-mode alm with zero T and B to obtain Q/U shear
    t, q, u = hp.alm2map(
        [np.zeros_like(kappa_alm), kappa_alm, np.zeros_like(kappa_alm)], nside=nside
    )
    LOGGER.info(f"are there any nans in the gamma maps? {np.isnan(q).any()}")
    return q, u

load_ufalcon_maps(spectra, path_maps, mask_paths_dict, index, path_boss, path_kids, path_cmb, path_act, noise, masked, nside, noise_alone, scale, class_reduced_cls, modulo_200_index, run_no, nbar_g_dict=None, path_maps_desi=None, use_grf=False)

A function to load the UFALCON maps and masks for a given set of spectra.

This function will load the maps and if desired the masks and noise realizations for a given set of spectra. It will return two dictionaries containing the maps and masks for each spectrum.

Source code in src/multiprobe_framework/covariance.py
 524
 525
 526
 527
 528
 529
 530
 531
 532
 533
 534
 535
 536
 537
 538
 539
 540
 541
 542
 543
 544
 545
 546
 547
 548
 549
 550
 551
 552
 553
 554
 555
 556
 557
 558
 559
 560
 561
 562
 563
 564
 565
 566
 567
 568
 569
 570
 571
 572
 573
 574
 575
 576
 577
 578
 579
 580
 581
 582
 583
 584
 585
 586
 587
 588
 589
 590
 591
 592
 593
 594
 595
 596
 597
 598
 599
 600
 601
 602
 603
 604
 605
 606
 607
 608
 609
 610
 611
 612
 613
 614
 615
 616
 617
 618
 619
 620
 621
 622
 623
 624
 625
 626
 627
 628
 629
 630
 631
 632
 633
 634
 635
 636
 637
 638
 639
 640
 641
 642
 643
 644
 645
 646
 647
 648
 649
 650
 651
 652
 653
 654
 655
 656
 657
 658
 659
 660
 661
 662
 663
 664
 665
 666
 667
 668
 669
 670
 671
 672
 673
 674
 675
 676
 677
 678
 679
 680
 681
 682
 683
 684
 685
 686
 687
 688
 689
 690
 691
 692
 693
 694
 695
 696
 697
 698
 699
 700
 701
 702
 703
 704
 705
 706
 707
 708
 709
 710
 711
 712
 713
 714
 715
 716
 717
 718
 719
 720
 721
 722
 723
 724
 725
 726
 727
 728
 729
 730
 731
 732
 733
 734
 735
 736
 737
 738
 739
 740
 741
 742
 743
 744
 745
 746
 747
 748
 749
 750
 751
 752
 753
 754
 755
 756
 757
 758
 759
 760
 761
 762
 763
 764
 765
 766
 767
 768
 769
 770
 771
 772
 773
 774
 775
 776
 777
 778
 779
 780
 781
 782
 783
 784
 785
 786
 787
 788
 789
 790
 791
 792
 793
 794
 795
 796
 797
 798
 799
 800
 801
 802
 803
 804
 805
 806
 807
 808
 809
 810
 811
 812
 813
 814
 815
 816
 817
 818
 819
 820
 821
 822
 823
 824
 825
 826
 827
 828
 829
 830
 831
 832
 833
 834
 835
 836
 837
 838
 839
 840
 841
 842
 843
 844
 845
 846
 847
 848
 849
 850
 851
 852
 853
 854
 855
 856
 857
 858
 859
 860
 861
 862
 863
 864
 865
 866
 867
 868
 869
 870
 871
 872
 873
 874
 875
 876
 877
 878
 879
 880
 881
 882
 883
 884
 885
 886
 887
 888
 889
 890
 891
 892
 893
 894
 895
 896
 897
 898
 899
 900
 901
 902
 903
 904
 905
 906
 907
 908
 909
 910
 911
 912
 913
 914
 915
 916
 917
 918
 919
 920
 921
 922
 923
 924
 925
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
def load_ufalcon_maps(
    spectra,
    path_maps,
    mask_paths_dict,
    index,
    path_boss,
    path_kids,
    path_cmb,
    path_act,
    noise,
    masked,
    nside,
    noise_alone,
    scale,
    class_reduced_cls,
    modulo_200_index,
    run_no,
    nbar_g_dict=None,
    path_maps_desi=None,
    use_grf=False,
):
    """
    A function to load the UFALCON maps and masks for a given set of spectra.

    This function will load the maps and if desired the masks and noise
    realizations for a given set of spectra. It will return two dictionaries
    containing the maps and masks for each spectrum.

    :param spectra: iterable of spectrum labels (e.g. "tg_1", "gg_12",
        "kd_3") selecting which probe maps/masks to load
    :param path_maps: directory containing the UFalcon simulation .npy maps
    :param mask_paths_dict: dict mapping probe keys ("t", "k", "l", "c",
        "g1".."g5", "d1".."d4", "ka") to mask file paths
    :param index: realization index; seeds ``hp.synfast`` for the CLASS
        kappa supplement and selects the CMB/ACT noise realizations
    :param path_boss: path to the BOSS catalogues (LOWZ/CMASS Poisson noise)
    :param path_kids: path to the KiDS catalogues (shape-noise realizations)
    :param path_cmb: path to the Planck products (temperature noise sims and
        MV lensing reconstruction sims)
    :param path_act: path to the ACT DR6 lensing simulation products
    :param noise: if True, add a noise realization to each loaded map
    :param masked: if True, load the per-probe masks into ``masks_dict``
    :param nside: HEALPix resolution of the maps
    :param noise_alone: if True, replace each map by its noise realization
        alone (requires ``noise=True`` to have any effect)
    :param scale: not used anywhere in this function — TODO confirm whether
        it can be removed from the signature
    :param class_reduced_cls: dict of CLASS spectra; key "pp" supplements
        the CMB lensing potential for the redshift range missing from the
        simulations
    :param modulo_200_index: index of the underlying simulation run, used in
        the map file names
    :param run_no: seed number for the noise realizations
    :param nbar_g_dict: dict of mean DESI galaxy densities per bin
        ("bin_1".."bin_4", deg^-2); required when ``noise`` is True for any
        DESI spectrum
    :param path_maps_desi: optional separate directory for the DESI maps;
        falls back to ``path_maps`` when None
    :param use_grf: if True, load the Gaussian-random-field DESI maps
        instead of the simulation maps

    :return: ``(maps_dict, masks_dict)`` — dicts keyed by probe label; each
        maps entry is a list of one (spin-0) or two (spin-2) HEALPix maps
    """
    masks_dict = {}
    maps_dict = {}

    # Per-probe "already loaded" flags so each map is read from disk only
    # once, even when several of the requested spectra share a probe.
    (
        t_loaded,
        kappa_loaded,
        l_loaded,
        c_loaded,
        g1_loaded,
        g2_loaded,
        g3_loaded,
        g4_loaded,
        g5_loaded,
        d1_loaded,
        d2_loaded,
        d3_loaded,
        d4_loaded,
        ka_loaded,  # New flag for ACT DR6 lensing
    ) = (
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,
        False,  # Initialize ACT DR6 lensing flag
    )

    for spec in tqdm(spectra):
        LOGGER.info(f"Loading maps for spectrum: {spec}")
        # --- CMB temperature (ISW) map -----------------------------------
        # NOTE(review): substring test — "t" also matches e.g. "tg_1";
        # presumably intentional so cross-spectra trigger the load.
        if "t" in spec:
            if not t_loaded:
                m_t = np.load(
                    path_maps + "/isw_map_run_" + str(modulo_200_index) + ".npy"
                )

                if masked:
                    mask_t = np.load(mask_paths_dict["t"])
                    masks_dict["t"] = mask_t

                if noise:
                    # Cycle through the FFP10 Commander CMB noise sims.
                    cmb_noise_num = str(index % 1000).zfill(3)
                    # cmb_noise_num = str(index % 300).zfill(3)
                    if nside == 2048:
                        extra_string = "005"
                    elif nside == 1024:
                        extra_string = "010"
                    noise_map = hp.fitsfunc.read_map(
                        str(path_cmb)
                        + f"/ffp10_compsep_mc_{nside}/"
                        + f"dx12_v3_commander_cmb_mc_00{cmb_noise_num}_"
                        + f"{extra_string}a_{nside}.fits"
                    )
                    # Follow approach of Ferraro et al.
                    # -> This should be noise dominated anyway!
                    # So neglect ISW component.
                    m_t = noise_map

                    if noise_alone:
                        m_t = noise_map

                # remove monopole
                m_t -= np.mean(m_t)
                maps_dict["t"] = [m_t]
                t_loaded = True

        # --- Planck CMB lensing convergence ------------------------------
        # NOTE(review): "k" also matches "ka*"/"kd*"/"kg*" spectra, so this
        # MV kappa map is loaded for those too — confirm intended.
        if "k" in spec:
            if not kappa_loaded:
                m_k = np.load(
                    str(path_maps)
                    + "/kappa_cmb_map_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )

                # Add the class reduced cls to the kappa map to supplement for
                # the missing redshift
                cl_PP_class_reduced = class_reduced_cls["pp"]
                # set random seed
                np.random.seed(index)
                m_p_class = hp.synfast(cl_PP_class_reduced, nside=nside)
                m_k_class = potential_to_kappa(m_p_class)
                m_k += m_k_class

                if masked:
                    mask_k = np.load(mask_paths_dict["k"])
                    masks_dict["k"] = mask_k

                if noise:
                    # Build the reconstruction-noise map from the Planck MV
                    # lensing simulations (300 realizations available).
                    noise_real = str(index % 300).zfill(3)
                    mf_klm = hp.fitsfunc.read_alm(
                        path_cmb + "/lensing_MV_data/MV/mf_klm.fits"
                    )
                    sim_klm = hp.read_alm(
                        path_cmb + f"/lensing_MV_sim/sim_klm_{noise_real}.fits"
                    )
                    sky_klm = hp.read_alm(
                        path_cmb + f"/lensing_MV_sim_inputs/sky_klm_{noise_real}.fits"
                    )
                    # Subtract input kappa and mean field kappa to get just
                    # the noise term.
                    klm_noise = sim_klm - sky_klm - mf_klm
                    map_noise = hp.alm2map(klm_noise, nside=nside)
                    m_k += map_noise

                    if noise_alone:
                        m_k = map_noise

                # remove monopole
                m_k -= np.mean(m_k)
                maps_dict["k"] = [m_k]
                kappa_loaded = True

        # --- BOSS LOWZ galaxy overdensity --------------------------------
        if "l" in spec:
            if not l_loaded:
                m_l = np.load(
                    path_maps + "/delta_map_lowz_run_" + str(modulo_200_index) + ".npy"
                )

                if masked:
                    mask_l = np.load(mask_paths_dict["l"])
                    masks_dict["l"] = mask_l

                if noise:
                    # Poisson shot-noise realization from randomized LOWZ
                    # galaxy positions.
                    binary_mask_lowz = np.load(mask_paths_dict["l"])
                    ra_lowz, dec_lowz, z_lowz, w_lowz = process_data.get_data_boss(
                        str(path_boss), "lowz"
                    )
                    m_l_poisson = process_data.generate_boss_map(
                        ra_lowz,
                        dec_lowz,
                        w_lowz,
                        binary_mask_lowz,
                        nside,
                        seed_no=int(run_no),
                        mode="random",
                    )
                    m_l += m_l_poisson

                    if noise_alone:
                        m_l = m_l_poisson

                # remove the monopole
                m_l -= np.mean(m_l)
                maps_dict["l"] = [m_l]
                l_loaded = True

        # --- BOSS CMASS galaxy overdensity -------------------------------
        if "c" in spec:
            if not c_loaded:
                m_c = np.load(
                    path_maps + "/delta_map_cmass_run_" + str(modulo_200_index) + ".npy"
                )

                if masked:
                    mask_c = np.load(mask_paths_dict["c"])
                    masks_dict["c"] = mask_c

                if noise:
                    # Poisson shot-noise realization from randomized CMASS
                    # galaxy positions.
                    binary_mask_cmass = np.load(mask_paths_dict["c"])
                    ra_cmass, dec_cmass, z_cmass, w_cmass = process_data.get_data_boss(
                        path_boss, "cmass"
                    )
                    m_c_poisson = process_data.generate_boss_map(
                        ra_cmass,
                        dec_cmass,
                        w_cmass,
                        binary_mask_cmass,
                        nside,
                        seed_no=int(run_no),
                        mode="random",
                    )
                    m_c += m_c_poisson

                    if noise_alone:
                        m_c = m_c_poisson

                # remove the monopole
                m_c -= np.mean(m_c)
                maps_dict["c"] = [m_c]
                c_loaded = True

        # --- KiDS shear, tomographic bin 1 -------------------------------
        # Exact membership test against every spectrum involving bin 1.
        if spec in np.array(
            [
                "gg_11",
                "gg_12",
                "gg_13",
                "gg_14",
                "gg_15",
                "tg_1",
                "kg_1",
                "cg_1",
                "lg_1",
                "kag_1",
            ]
        ):
            if not g1_loaded:
                kappa_shear_bin = np.load(
                    path_maps
                    + "/kappa_shear_map_bin_1_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )
                m_g_0_bin1, m_g_1_bin1 = kappa_to_gamma(kappa_shear_bin)

                if masked:
                    # load weights for KiDS mask
                    mask_g1 = np.load(mask_paths_dict["g1"])
                    masks_dict["g1"] = mask_g1

                if noise:
                    # Load the noise for the first tomographic bin.
                    e1, e2, w, m_bias, idx = process_data.get_data_kids(
                        path_kids, tomo_bin=1, nside=nside
                    )
                    gamma_0_noise, gamma_1_noise, _ = process_data.generate_kids_map(
                        nside,
                        e1,
                        e2,
                        w,
                        idx,
                        rotate=True,
                        m=m_bias,
                        seed_no=int(run_no) + 1,
                    )
                    m_g_0_bin1 += gamma_0_noise
                    m_g_1_bin1 += gamma_1_noise

                    if noise_alone:
                        m_g_0_bin1 = gamma_0_noise
                        m_g_1_bin1 = gamma_1_noise

                # remove the monopole
                m_g_0_bin1 -= np.mean(m_g_0_bin1)
                m_g_1_bin1 -= np.mean(m_g_1_bin1)
                maps_dict["g1"] = [m_g_0_bin1, m_g_1_bin1]
                g1_loaded = True

        # --- KiDS shear, tomographic bin 2 -------------------------------
        if spec in np.array(
            [
                "gg_12",
                "gg_22",
                "gg_23",
                "gg_24",
                "gg_25",
                "tg_2",
                "kg_2",
                "cg_2",
                "lg_2",
                "kag_2",
            ]
        ):
            if g2_loaded:
                pass

            else:
                kappa_shear_bin = np.load(
                    str(path_maps)
                    + "/kappa_shear_map_bin_2_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )
                m_g_0_bin2, m_g_1_bin2 = kappa_to_gamma(kappa_shear_bin)

                if masked:
                    # load weights for KiDS mask
                    mask_g2 = np.load(mask_paths_dict["g2"])
                    masks_dict["g2"] = mask_g2

                if noise:
                    e1, e2, w, m_bias, idx = process_data.get_data_kids(
                        path_kids, tomo_bin=2, nside=nside
                    )
                    gamma_0_noise, gamma_1_noise, _ = process_data.generate_kids_map(
                        nside,
                        e1,
                        e2,
                        w,
                        idx,
                        rotate=True,
                        m=m_bias,
                        seed_no=int(run_no) + 1,
                    )
                    m_g_0_bin2 += gamma_0_noise
                    m_g_1_bin2 += gamma_1_noise

                    if noise_alone:
                        m_g_0_bin2 = gamma_0_noise
                        m_g_1_bin2 = gamma_1_noise

                # remove the monopole
                m_g_0_bin2 -= np.mean(m_g_0_bin2)
                m_g_1_bin2 -= np.mean(m_g_1_bin2)
                maps_dict["g2"] = [m_g_0_bin2, m_g_1_bin2]
                g2_loaded = True

        # --- KiDS shear, tomographic bin 3 -------------------------------
        if spec in np.array(
            [
                "gg_13",
                "gg_23",
                "gg_33",
                "gg_34",
                "gg_35",
                "tg_3",
                "kg_3",
                "cg_3",
                "lg_3",
                "kag_3",
            ]
        ):
            if g3_loaded:
                pass

            else:
                kappa_shear_bin = np.load(
                    str(path_maps)
                    + "/kappa_shear_map_bin_3_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )
                m_g_0_bin3, m_g_1_bin3 = kappa_to_gamma(kappa_shear_bin)

                if masked:
                    # load weights for KiDS mask
                    mask_g3 = np.load(mask_paths_dict["g3"])
                    masks_dict["g3"] = mask_g3

                if noise:
                    e1, e2, w, m_bias, idx = process_data.get_data_kids(
                        path_kids, tomo_bin=3, nside=nside
                    )
                    gamma_0_noise, gamma_1_noise, _ = process_data.generate_kids_map(
                        nside,
                        e1,
                        e2,
                        w,
                        idx,
                        rotate=True,
                        m=m_bias,
                        seed_no=int(run_no) + 1,
                    )
                    m_g_0_bin3 += gamma_0_noise
                    m_g_1_bin3 += gamma_1_noise

                    if noise_alone:
                        m_g_0_bin3 = gamma_0_noise
                        m_g_1_bin3 = gamma_1_noise

                # remove the monopole
                m_g_0_bin3 -= np.mean(m_g_0_bin3)
                m_g_1_bin3 -= np.mean(m_g_1_bin3)
                maps_dict["g3"] = [m_g_0_bin3, m_g_1_bin3]
                g3_loaded = True

        # --- KiDS shear, tomographic bin 4 -------------------------------
        if spec in np.array(
            [
                "gg_14",
                "gg_24",
                "gg_34",
                "gg_44",
                "gg_45",
                "tg_4",
                "kg_4",
                "cg_4",
                "lg_4",
                "kag_4",
            ]
        ):
            if g4_loaded:
                pass

            else:
                kappa_shear_bin = np.load(
                    str(path_maps)
                    + "/kappa_shear_map_bin_4_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )
                m_g_0_bin4, m_g_1_bin4 = kappa_to_gamma(kappa_shear_bin)

                if masked:
                    mask_g4 = np.load(mask_paths_dict["g4"])
                    masks_dict["g4"] = mask_g4

                if noise:
                    e1, e2, w, m_bias, idx = process_data.get_data_kids(
                        path_kids, tomo_bin=4, nside=nside
                    )
                    gamma_0_noise, gamma_1_noise, _ = process_data.generate_kids_map(
                        nside,
                        e1,
                        e2,
                        w,
                        idx,
                        rotate=True,
                        m=m_bias,
                        seed_no=int(run_no) + 1,
                    )
                    m_g_0_bin4 += gamma_0_noise
                    m_g_1_bin4 += gamma_1_noise

                    if noise_alone:
                        m_g_0_bin4 = gamma_0_noise
                        m_g_1_bin4 = gamma_1_noise

                # remove the monopole
                m_g_0_bin4 -= np.mean(m_g_0_bin4)
                m_g_1_bin4 -= np.mean(m_g_1_bin4)
                maps_dict["g4"] = [m_g_0_bin4, m_g_1_bin4]
                g4_loaded = True

        # --- KiDS shear, tomographic bin 5 -------------------------------
        if spec in np.array(
            [
                "gg_15",
                "gg_25",
                "gg_35",
                "gg_45",
                "gg_55",
                "tg_5",
                "kg_5",
                "cg_5",
                "lg_5",
                "kag_5",
            ]
        ):
            if g5_loaded:
                pass

            else:
                kappa_shear_bin = np.load(
                    str(path_maps)
                    + "/kappa_shear_map_bin_5_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )
                m_g_0_bin5, m_g_1_bin5 = kappa_to_gamma(kappa_shear_bin)

                if masked:
                    mask_g5 = np.load(mask_paths_dict["g5"])
                    masks_dict["g5"] = mask_g5

                if noise:
                    e1, e2, w, m_bias, idx = process_data.get_data_kids(
                        path_kids, tomo_bin=5, nside=nside
                    )
                    gamma_0_noise, gamma_1_noise, _ = process_data.generate_kids_map(
                        nside,
                        e1,
                        e2,
                        w,
                        idx,
                        rotate=True,
                        m=m_bias,
                        seed_no=int(run_no) + 1,
                    )
                    m_g_0_bin5 += gamma_0_noise
                    m_g_1_bin5 += gamma_1_noise

                    if noise_alone:
                        m_g_0_bin5 = gamma_0_noise
                        m_g_1_bin5 = gamma_1_noise

                # remove the monopole
                m_g_0_bin5 -= np.mean(m_g_0_bin5)
                m_g_1_bin5 -= np.mean(m_g_1_bin5)
                maps_dict["g5"] = [m_g_0_bin5, m_g_1_bin5]
                g5_loaded = True

        # finally load in DESI maps signified by d_1 to d_4

        if spec in np.array(["dd_11", "kd_1"]):
            if not d1_loaded:
                if path_maps_desi is None:
                    if not use_grf:
                        m_d1 = np.load(
                            path_maps
                            + "/delta_desi_map_bin_1_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d1 = np.load(
                            path_maps
                            + "/delta_desi_map_grf_bin_1_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                else:
                    if not use_grf:
                        m_d1 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_bin_1_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d1 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_grf_bin_1_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )

                if masked:
                    mask_d1 = np.load(mask_paths_dict["d1"])
                    masks_dict["d1"] = mask_d1

                # NOTE(review): mask_d1 below is only defined when
                # masked=True — noise=True without masked would raise a
                # NameError; confirm callers always pass masked.
                if noise:
                    nbar_g_1 = nbar_g_dict["bin_1"]
                    noise_map = poisson_galaxy_counts(
                        mask_d1, nbar_g_1, seed_no=int(run_no) + 1
                    )
                    m_d1 += noise_map

                    if noise_alone:
                        m_d1 = noise_map

                maps_dict["d1"] = [m_d1]
                d1_loaded = True

        if spec in np.array(["dd_22", "kd_2"]):
            if not d2_loaded:
                if path_maps_desi is None:
                    if not use_grf:
                        m_d2 = np.load(
                            path_maps
                            + "/delta_desi_map_bin_2_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d2 = np.load(
                            path_maps
                            + "/delta_desi_map_grf_bin_2_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                else:
                    if not use_grf:
                        m_d2 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_bin_2_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d2 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_grf_bin_2_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )

                if masked:
                    mask_d2 = np.load(mask_paths_dict["d2"])
                    masks_dict["d2"] = mask_d2

                # NOTE(review): mask_d2 requires masked=True (see d1 note).
                if noise:
                    nbar_g_2 = nbar_g_dict["bin_2"]
                    noise_map = poisson_galaxy_counts(
                        mask_d2, nbar_g_2, seed_no=int(run_no) + 1
                    )
                    m_d2 += noise_map

                    if noise_alone:
                        m_d2 = noise_map

                maps_dict["d2"] = [m_d2]
                d2_loaded = True

        if spec in np.array(["dd_33", "kd_3"]):
            if not d3_loaded:
                if path_maps_desi is None:
                    if not use_grf:
                        m_d3 = np.load(
                            path_maps
                            + "/delta_desi_map_bin_3_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d3 = np.load(
                            path_maps
                            + "/delta_desi_map_grf_bin_3_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                else:
                    if not use_grf:
                        m_d3 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_bin_3_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d3 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_grf_bin_3_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )

                if masked:
                    mask_d3 = np.load(mask_paths_dict["d3"])
                    masks_dict["d3"] = mask_d3

                # NOTE(review): mask_d3 requires masked=True (see d1 note).
                if noise:
                    nbar_g_3 = nbar_g_dict["bin_3"]
                    noise_map = poisson_galaxy_counts(
                        mask_d3, nbar_g_3, seed_no=int(run_no) + 1
                    )
                    m_d3 += noise_map

                    if noise_alone:
                        m_d3 = noise_map

                maps_dict["d3"] = [m_d3]
                d3_loaded = True

        if spec in np.array(["dd_44", "kd_4"]):
            if not d4_loaded:
                if path_maps_desi is None:
                    if not use_grf:
                        m_d4 = np.load(
                            path_maps
                            + "/delta_desi_map_bin_4_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d4 = np.load(
                            path_maps
                            + "/delta_desi_map_grf_bin_4_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                else:
                    if not use_grf:
                        m_d4 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_bin_4_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )
                    else:
                        m_d4 = np.load(
                            path_maps_desi
                            + "/delta_desi_map_grf_bin_4_run_"
                            + str(modulo_200_index)
                            + ".npy"
                        )

                if masked:
                    mask_d4 = np.load(mask_paths_dict["d4"])
                    masks_dict["d4"] = mask_d4

                # NOTE(review): mask_d4 requires masked=True (see d1 note).
                if noise:
                    nbar_g_4 = nbar_g_dict["bin_4"]
                    noise_map = poisson_galaxy_counts(
                        mask_d4, nbar_g_4, seed_no=int(run_no) + 1
                    )
                    m_d4 += noise_map

                    if noise_alone:
                        m_d4 = noise_map

                maps_dict["d4"] = [m_d4]
                d4_loaded = True

        # --- ACT DR6 CMB lensing convergence -----------------------------
        if "ka" in spec:
            if not ka_loaded:
                m_ka = np.load(
                    str(path_maps)
                    + "/kappa_cmb_map_run_"
                    + str(modulo_200_index)
                    + ".npy"
                )

                # Add the class reduced cls to the kappa map to supplement for
                # the missing redshift.
                cl_PP_class_reduced = class_reduced_cls["pp"]
                # set random seed
                np.random.seed(index)
                m_p_class = hp.synfast(cl_PP_class_reduced, nside=nside)
                m_k_class = potential_to_kappa(m_p_class)
                m_ka += m_k_class

                if masked:
                    mask_ka = np.load(mask_paths_dict["ka"])
                    masks_dict["ka"] = mask_ka

                # NOTE(review): masks_dict["ka"] below is only set when
                # masked=True — noise=True without masked would raise a
                # KeyError; confirm callers always pass masked.
                if noise:
                    ind = index % 400
                    ind += 1
                    noise_real = str(ind).zfill(
                        4
                    )  # cycle through the 400 ACT simulations
                    sim_klm = hp.read_alm(
                        path_act
                        + "/simulations"
                        + "/kappa_alm_sim_act_dr6_lensing_v1_baseline"
                        + f"_{noise_real}.fits"
                    )
                    sky_klm = hp.read_alm(
                        path_act
                        + f"/sim_inputs/kappa_alm/input_kappa_alm_sim_{noise_real}.fits"
                    ).astype(np.complex128)

                    map_sim = hp.alm2map(sim_klm, nside=nside)
                    map_input = hp.alm2map(sky_klm, nside=nside)
                    # need to rotate into galactic coordinates
                    r = hp.Rotator(coord=["C", "G"])
                    map_noise = (
                        r.rotate_map_alms(map_sim - map_input) * masks_dict["ka"]
                    )
                    m_ka += map_noise

                    if noise_alone:
                        m_ka = map_noise

                maps_dict["ka"] = [m_ka]
                ka_loaded = True

    return maps_dict, masks_dict

get_nmt_cl(spec, maps1, maps2, mask1, mask2, path_wsp, data_dir, nside, save_coupled=False)

Return the NaMaster computed Cl for a set of maps and masks.

Maps should be specified as lists, with the spin of the map implicit in the number of maps passed in the list.

Source code in src/multiprobe_framework/covariance.py
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
def get_nmt_cl(
    spec,
    maps1,
    maps2,
    mask1,
    mask2,
    path_wsp,
    data_dir,
    nside,
    save_coupled=False,
):
    """
    Return the NaMaster computed Cl for a set of maps and masks.

    Maps should be specified as lists, with the spin of the map implicit in
    the number of maps passed in the list.

    :param spec: spectrum label (e.g. "tg_1") used to locate the workspace
    :param maps1: list of maps for the first field (one map -> spin-0,
        two maps -> spin-2)
    :param maps2: list of maps for the second field
    :param mask1: mask for the first field
    :param mask2: mask for the second field
    :param path_wsp: directory holding the precomputed NaMaster workspaces
    :param data_dir: data directory; currently unused because the
        save_coupled branch is disabled (see note below)
    :param nside: HEALPix resolution, used to build the pixel window beams
    :param save_coupled: currently has no effect — kept for interface
        compatibility while the coupled-cell saving is disabled

    :return: the decoupled Cl — a single array for spin 0x0 and 0x2
        spectra, or an (EE, BB) tuple for spin 2x2 spectra
    """

    # Pixel window functions for spin-0 (T) and spin-2 (pol) fields.
    beam = {}
    beam[0], beam[2] = hp.pixwin(nside=nside, pol=True)

    spin_1 = utils.get_spin(maps1)
    spin_2 = utils.get_spin(maps2)

    specind_1, specind_2 = utils.get_specinds(spec)

    f_0, f_1 = create_nmt_fields(
        specind_1,
        specind_2,
        mask1,
        mask2,
        beam,
        maps=[maps1, maps2],
        w_beam_I=None,
        lmax=-1,
    )

    # Re-use the precomputed mode-coupling workspace for this spectrum
    # rather than recomputing the coupling matrix (expensive).
    w = nmt.NmtWorkspace()
    w.read_from(path_wsp + f"/pymaster_workspace_{spec}.fits")

    cl_coupled = nmt.compute_coupled_cell(f_0, f_1)

    # Note: save_coupled functionality disabled due to undefined index parameter
    # if save_coupled:
    #     np.save(
    #         data_dir + f"/UFalcon_cls/coupled_cells/cell_spec_{spec}_index_{index}",
    #         cl_coupled,
    #     )

    cl_compute = w.decouple_cell(cl_coupled)

    # Select the relevant spectra (use boolean `and`, not bitwise `&`).
    if spin_1 == 0 and spin_2 == 0:
        return cl_compute[0]

    if (spin_1, spin_2) in ((0, 2), (2, 0)):
        # Spin-0 x spin-2: the first entry is the E-mode cross-spectrum.
        return cl_compute[0]

    if spin_1 == 2 and spin_2 == 2:
        # Spin-2 x spin-2: return both the EE and BB spectra.
        return cl_compute[0], cl_compute[3]

poisson_galaxy_counts(mask, nbar_deg2, nside=2048, seed_no=0)

Generate a Poisson realization of galaxy counts per HEALPix pixel given a mask and mean density.

Parameters: - mask (numpy array): Mask array for HEALPix pixels where galaxies are allowed (1 for allowed, 0 for masked). - nbar_deg2 (float): Mean galaxy density in deg^-2. - nside (int): HEALPix resolution parameter, default is 2048. - seed_no (int): random seed used for the Poisson draw, default is 0.

Returns: - galaxy_counts (numpy array): Array of Poisson-sampled galaxy counts for each pixel, 0 where masked.

Source code in src/multiprobe_framework/covariance.py
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
def poisson_galaxy_counts(mask, nbar_deg2, nside=2048, seed_no=0):
    """
    Generate a Poisson realization of galaxy counts per HEALPix pixel given a
    mask and mean density, returned as an overdensity map.

    Parameters:
    - mask (numpy array): Mask array for HEALPix pixels where galaxies are
      allowed (>0 for allowed, 0 for masked).
    - nbar_deg2 (float): Mean galaxy density in deg^-2.
    - nside (int): HEALPix resolution parameter, default is 2048.
    - seed_no (int): seed for the global NumPy RNG, default is 0.

    Returns:
    - overdensity_map (numpy array): Poisson-sampled galaxy overdensity
      (N / Nbar - 1) per pixel, 0 where masked.
    """
    # Expected number of galaxies per pixel = density x pixel area (deg^2).
    mean_count = nbar_deg2 * hp.nside2pixarea(nside, degrees=True)

    # Draw Poisson counts only on the unmasked pixels, reproducibly.
    unmasked = mask > 0
    counts = np.zeros_like(mask, dtype=int)
    np.random.seed(seed_no)
    counts[unmasked] = np.random.poisson(mean_count, size=np.sum(unmasked))

    # Convert counts to overdensity, leaving masked pixels at zero.
    delta = np.zeros_like(mask, dtype=float)
    delta[unmasked] = (counts[unmasked] - mean_count) / mean_count

    return delta