Coverage for estats/stats/CrossStarletPeaks.py: 20%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# Copyright (C) 2019 ETH Zurich
2# Institute for Particle Physics and Astrophysics
3# Author: Dominik Zuercher
5import numpy as np
6import healpy as hp
7from estats.stats import CrossPeaks
def context():
    """
    Defines the parameters used by the plugin.

    :return: Tuple (required, defaults, types, stat_type) describing the
             configuration entries this plugin reads from the context.
    """
    stat_type = 'convergence-cross'

    # dyadic starlet filter scales (in units of the multipole binning)
    scales = [48, 65, 89, 121, 164, 223, 303, 412, 560,
              761, 1034, 1405, 1910, 2597,
              3530, 4799, 6523, 8867, 12053, 16384]

    required = ['Starlet_steps', 'Starlet_scales', 'Starlet_selected_scales',
                'peak_lower_threshold', 'Starlet_sliced_bins', 'NSIDE',
                'min_count', 'SNR_peaks',
                'max_SNR']
    # note: 'Starlet_selected_scales' gets its own list copy so the two
    # defaults are independent objects
    defaults = [1000, scales, list(scales), 2.5, 15, 1024, 30, False, 100.]
    types = ['int', 'list', 'list', 'float', 'int', 'int',
             'int', 'bool', 'float']
    return required, defaults, types, stat_type
def CrossStarletPeaks(map_w, weights, ctx):
    """
    Performs the starlet-wavelet decomposition of map and counts the local
    maxima in each filter band.

    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights (integer >=0)
    :param ctx: Context instance
    :return: Starlet counts (num filter bands, Starlet_steps + 1)
    """
    try:
        from esd import esd
    except ImportError:
        raise ImportError(
            "Did not find esd package. "
            "It is required for this module to work properly. "
            "Download from: "
            "https://cosmo-gitlab.phys.ethz.ch/cosmo_public/esd")

    scales = ctx['Starlet_scales']
    wavelet_counts = np.zeros((len(scales), ctx['Starlet_steps'] + 1))

    # pixels with (near-)zero weight are masked again after every
    # decomposition step, since the wavelet transform fills them in
    masked = np.isclose(weights, 0)

    wave_iter = esd.calc_wavelet_decomp_iter(map_w, l_bins=scales)
    for band, wmap in enumerate(wave_iter):
        # drop the first map: it contains the remaining small scales
        if band == 0:
            continue
        wmap[masked] = hp.UNSEEN
        # count peaks of this filter band with the CrossPeaks plugin
        wavelet_counts[band - 1] = CrossPeaks.CrossPeaks(wmap, weights, ctx)
    return wavelet_counts
def process(data, ctx, scale_to_unity=False):
    """
    Collapse the per-scale rows of the raw data vector into one row per
    realization by concatenating all scales horizontally.

    :param data: 2D array; consecutive groups of ``num_of_scales`` rows
                 belong to one realization
    :param ctx: Context instance
    :param scale_to_unity: Unused here; kept for the common plugin interface
    :return: 2D array with shape (n_realizations, n_scales * n_bins)
    """
    # backwards compatibility for data without map std
    # NOTE(review): key is 'CrossPeaks_steps' while the rest of the module
    # uses 'Starlet_*' keys — presumably provided by the CrossPeaks plugin;
    # verify against the context setup
    if data.shape[1] > ctx['CrossPeaks_steps']:
        data = data[:, :-1]

    num_of_scales = len(ctx['Starlet_scales'])
    n_realizations = data.shape[0] // num_of_scales

    # row-major reshape concatenates each realization's scale rows,
    # identical to ravel()-ing each group of rows; trailing incomplete
    # groups (if any) are dropped, as in the original loop
    new_data = data[:n_realizations * num_of_scales].reshape(
        n_realizations, num_of_scales * data.shape[1])
    return new_data
def slice(ctx):
    """
    Return the slicing configuration for this statistic.

    :param ctx: Context instance
    :return: (num_of_scales, n_bins_sliced, operation, mult, range_mode)
    """
    # one datavector per scale
    mult = 1
    # number of starlet filter scales
    num_of_scales = len(ctx['Starlet_scales'])
    # entries are summed (not averaged) when collapsed into sliced bins
    operation = 'sum'
    n_bins_sliced = ctx['Starlet_sliced_bins']
    # True: first/last entries of the data vector are the lower/upper
    # boundaries and the binning scheme stores bin edges, not indices
    range_mode = True
    return num_of_scales, n_bins_sliced, operation, mult, range_mode
def decide_binning_scheme(data, meta, bin, ctx):
    """
    Build per-scale kappa bin edges and centers for slicing.

    :param data: 2D array; each row holds the concatenated original bins of
                 all scales for one realization
    :param meta: 2D array; column 0 is the tomographic bin id, column 1 the
                 number of data rows belonging to that entry
    :param bin: Tomographic bin to select
    :param ctx: Context instance
    :return: (bin_edges, bin_centers) with shapes
             (num_of_scales, n_bins_sliced + 1) and
             (num_of_scales, n_bins_sliced)
    """
    num_of_scales = len(ctx['Starlet_scales'])
    n_bins_original = ctx['Starlet_steps']
    n_bins_sliced = ctx['Starlet_sliced_bins']

    # expand the per-entry row counts and keep only rows of this tomo bin
    tomo_select = np.repeat(meta[:, 0] == bin, meta[:, 1].astype(int))
    data = data[tomo_select, :]

    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    bin_edges = np.zeros((num_of_scales, n_bins_sliced + 1))
    for scale in range(num_of_scales):
        # columns belonging to this smoothing scale
        block = data[:, n_bins_original * scale:n_bins_original * (scale + 1)]
        # intersection of the per-realization kappa ranges: the largest
        # lower boundary and the smallest upper boundary
        lo = np.max(block[:, 0])
        hi = np.min(block[:, -1])
        edges = np.linspace(lo, hi, n_bins_sliced + 1)
        bin_edges[scale] = edges
        bin_centers[scale] = 0.5 * (edges[:-1] + edges[1:])
    return bin_edges, bin_centers
def filter(ctx):
    """
    Build a mask selecting the sliced bins of the chosen starlet scales.

    Scales are traversed in reverse order of ``Starlet_scales``; each scale
    contributes ``Starlet_sliced_bins`` entries, set to 1 when the scale is
    listed in ``Starlet_selected_scales`` and 0 otherwise.

    :param ctx: Context instance
    :return: 1D float array (np.append promotes the boolean chunks) of
             length len(Starlet_scales) * Starlet_sliced_bins
    """
    # renamed from 'filter' to avoid shadowing the builtin inside
    # the function of the same name
    selected = np.zeros(0)
    n_bins = ctx['Starlet_sliced_bins']
    for scale in reversed(ctx['Starlet_scales']):
        keep = scale in ctx['Starlet_selected_scales']
        selected = np.append(selected, np.full(n_bins, keep))
    return selected