Coverage for estats/stats/CrossStarletL1Norm.py: 16%


# Copyright (C) 2019 ETH Zurich
# Institute for Particle Physics and Astrophysics
# Author: Dominik Zuercher

import numpy as np
import healpy as hp
from astropy.stats import mad_std

def context():
    """
    Defines the parameters used by the plugin.
    """
    stat_type = 'convergence-cross'

    required = ['Starlet_L1_steps', 'Starlet_scales',
                'Starlet_L1_selected_scales',
                'Starlet_L1_sliced_bins', 'NSIDE',
                'min_SL1_SNR', 'max_SL1_SNR']
    defaults = [1000, [48, 65, 89, 121, 164, 223, 303, 412, 560,
                       761, 1034, 1405, 1910, 2597, 3530,
                       4799, 6523, 8867, 12053, 16384],
                [48, 65, 89, 121, 164, 223, 303, 412, 560,
                 761, 1034, 1405, 1910, 2597, 3530,
                 4799, 6523, 8867, 12053, 16384],
                15, 1024, -4., 4.]
    types = ['int', 'list', 'list', 'int', 'int',
             'float', 'float']
    return required, defaults, types, stat_type

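# A minimal sketch of how the plugin context might be assembled into a
# dictionary (hypothetical driver code; the actual estats pipeline handles
# this internally):
#
#     required, defaults, _, _ = context()
#     ctx = dict(zip(required, defaults))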

def CrossStarletL1Norm(map_w, weights, ctx):
    """
    Performs Starlet decomposition of a map and calculates the L1 norm of
    each filter band.

    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights
    :param ctx: Context instance
    :return: Starlet L1 norm, shape (number of filter bands,
             Starlet_L1_steps + 1)
    """

    try:
        from esd import esd
    except ImportError:
        raise ImportError(
            "Did not find esd package. "
            "It is required for this module to work properly. "
            "Download from: "
            "https://cosmo-gitlab.phys.ethz.ch/cosmo_public/esd")

    l1_coll = np.zeros((len(ctx['Starlet_scales']),
                        ctx['Starlet_L1_steps'] + 1))

    # noise map to estimate std of wavelet coeffs
    std = mad_std(map_w[map_w > hp.UNSEEN])
    rands = np.random.randn(np.sum(map_w > hp.UNSEEN))
    noise_map = np.full(map_w.size, hp.UNSEEN)
    noise_map[map_w > hp.UNSEEN] = rands

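    # Note: the noise map mirrors the mask of the input map; unmasked pixels
    # hold unit-variance Gaussian draws, so passing it through the same
    # wavelet decomposition below measures the per-band response to white
    # noise, which is then rescaled by the map's mad_std.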

    # generators for wavelet decompositions
    wave_iter = esd.calc_wavelet_decomp_iter(
        map_w, l_bins=ctx['Starlet_scales'])
    noise_iter = esd.calc_wavelet_decomp_iter(
        noise_map, l_bins=ctx['Starlet_scales'])

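    # Both iterators apply the identical decomposition, so every wavelet band
    # of the data map has a matching noise band for the SNR normalisation in
    # the loop below.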

    counter = 0
    for ii, maps in enumerate(zip(wave_iter, noise_iter)):
        # skip the first map yielded by the decomposition iterator
        if ii == 0:
            continue

        wmap = maps[0]
        nmap = maps[1]

        # redo masking
        wmap = wmap[weights > 0.0]
        nmap = nmap[weights > 0.0]

        # per-band noise level: std of the decomposed unit-variance noise,
        # rescaled by the mad_std of the input map
        noise_est = np.std(nmap) * std
        snr = wmap / noise_est
        minimum = np.min(snr)
        maximum = np.max(snr)

        thresholds_snr = np.linspace(minimum, maximum,
                                     ctx['Starlet_L1_steps'] - 1)
        digitized = np.digitize(snr, thresholds_snr)
        snr_abs = np.abs(snr)
        bin_l1_norm = [np.sum(snr_abs[digitized == i])
                       for i in range(1, len(thresholds_snr))]

        # append min, max and std of the map
        bin_l1_norm = np.hstack((np.asarray([minimum]),
                                 bin_l1_norm, np.asarray([maximum]),
                                 np.asarray([std])))
        bin_l1_norm = bin_l1_norm.reshape(1, bin_l1_norm.size)
        l1_coll[counter] = bin_l1_norm
        counter += 1
    return l1_coll

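# A minimal usage sketch (hypothetical driver code; requires the esd package):
#
#     required, defaults, _, _ = context()
#     ctx = dict(zip(required, defaults))
#     npix = hp.nside2npix(ctx['NSIDE'])
#     kappa = 0.02 * np.random.randn(npix)
#     weights = np.ones(npix)
#     l1 = CrossStarletL1Norm(kappa, weights, ctx)
#     # expected shape: (len(ctx['Starlet_scales']),
#     #                  ctx['Starlet_L1_steps'] + 1)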

def process(data, ctx, scale_to_unity=False):
    # backwards compatibility for data without map std
    if data.shape[1] > ctx['Starlet_L1_steps']:
        data = data[:, :-1]

    num_of_scales = len(ctx['Starlet_scales'])

    new_data = np.zeros(
        (int(data.shape[0] / num_of_scales), data.shape[1]
         * num_of_scales))
    for jj in range(int(data.shape[0] / num_of_scales)):
        new_data[jj, :] = data[jj * num_of_scales:
                               (jj + 1) * num_of_scales, :].ravel()
    return new_data

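# Example (with the defaults above): the output of CrossStarletL1Norm stacked
# for two realizations has shape (40, 1001); process drops the trailing std
# column and returns an array of shape (2, 20 * 1000), one row per
# realization.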

def slice(ctx):
    # number of datavectors for each scale
    mult = 1
    # number of scales
    num_of_scales = len(ctx['Starlet_scales'])
    # either mean or sum, for how to assemble the data into the bins
    operation = 'sum'

    n_bins_sliced = ctx['Starlet_L1_sliced_bins']

    # if True, assumes that the first and last entries of the data vector
    # indicate the upper and lower boundaries, and that the binning scheme
    # indicates bin edges rather than their indices
    range_mode = True

    return num_of_scales, n_bins_sliced, operation, mult, range_mode

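# Note: range_mode = True matches the layout produced by CrossStarletL1Norm,
# where the minimum and maximum SNR values are prepended and appended to each
# per-scale data vector and serve as the slicing boundaries.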

def decide_binning_scheme(data, meta, bin, ctx):
    num_of_scales = len(ctx['Starlet_scales'])
    n_bins_original = ctx['Starlet_L1_steps']
    n_bins_sliced = ctx['Starlet_L1_sliced_bins']

    # get the correct tomographic bins
    bin_idx = np.zeros(meta.shape[0], dtype=bool)
    bin_idx[np.where(meta[:, 0] == bin)[0]] = True
    bin_idx = np.repeat(bin_idx, meta[:, 1].astype(int))
    data = data[bin_idx, :]

    # Get bins for each smoothing scale
    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    bin_edges = np.zeros((num_of_scales, n_bins_sliced + 1))
    for scale in range(num_of_scales):
        # cut correct scale and minimum and maximum kappa values
        data_act = data[:,
                        n_bins_original * scale:n_bins_original * (scale + 1)]
        # use the largest minimum and the smallest maximum so the binning
        # covers only the range sampled by every realization
        minimum = np.max(data_act[:, 0])
        maximum = np.min(data_act[:, -1])
        new_kappa_bins = np.linspace(minimum, maximum, n_bins_sliced + 1)
        bin_edges[scale, :] = new_kappa_bins

        bin_centers_act = new_kappa_bins[:-1] + 0.5 * \
            (new_kappa_bins[1:] - new_kappa_bins[:-1])
        bin_centers[scale, :] = bin_centers_act
    return bin_edges, bin_centers


def filter(ctx):
    filter = np.zeros(0)
    for scale in reversed(ctx['Starlet_scales']):
        if scale in ctx['Starlet_L1_selected_scales']:
            f = np.asarray([True] * ctx['Starlet_L1_sliced_bins'])
        else:
            f = np.asarray([False] * ctx['Starlet_L1_sliced_bins'])
        filter = np.append(filter, f)
    return filter
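
# Example: with the default configuration all 20 scales are selected, so
# filter() returns a flat mask of 20 * 15 = 300 entries, all set; removing a
# scale from 'Starlet_L1_selected_scales' zeroes its 15 entries.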