Coverage for estats/stats/CrossStarletVoids.py: 20%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

69 statements  

1# Copyright (C) 2019 ETH Zurich 

2# Institute for Particle Physics and Astrophysics 

3# Author: Dominik Zuercher 

4 

5import numpy as np 

6import healpy as hp 

7from estats.stats import CrossVoids 

8 

9 

def context():
    """
    Defines the parameters used by the plugin.

    :return: tuple (required, defaults, types, stat_type) listing the
        required configuration keys, their default values, their type
        tags and the statistic type identifier.
    """
    stat_type = 'convergence-cross'

    # default starlet filter scales (shared by Starlet_scales and
    # Starlet_selected_scales)
    scales = [48, 65, 89, 121, 164, 223, 303, 412, 560,
              761, 1034, 1405, 1910, 2597, 3530,
              4799, 6523, 8867, 12053, 16384]

    required = ['Starlet_steps', 'Starlet_scales', 'Starlet_selected_scales',
                'void_upper_threshold', 'Starlet_sliced_bins', 'NSIDE',
                'min_count', 'SNR_voids',
                'min_SNR']
    defaults = [1000, scales, list(scales),
                -2.5, 15, 1024, 30, False, -100.]
    types = ['int', 'list', 'list', 'float', 'int', 'int',
             'int', 'bool', 'float']
    return required, defaults, types, stat_type

31 

32 

def CrossStarletVoids(map_w, weights, ctx):
    """
    Performs the starlet-wavelet decomposition of the map and collects the
    void statistics (via CrossVoids) in each filter band.

    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights (integer >=0)
    :param ctx: Context instance
    :return: Starlet counts (num filter bands, Starlet_steps + 1)
    """

    try:
        from esd import esd
    except ImportError:
        raise ImportError(
            "Did not find esd package. "
            "It is required for this module to work properly. "
            "Download from: "
            "https://cosmo-gitlab.phys.ethz.ch/cosmo_public/esd")

    # one row of counts per wavelet scale
    wavelet_counts = np.zeros((len(ctx['Starlet_scales']),
                               ctx['Starlet_steps'] + 1))

    # iterate over the filter bands of the decomposition
    band_iter = esd.calc_wavelet_decomp_iter(
        map_w, l_bins=ctx['Starlet_scales'])
    for idx, band in enumerate(band_iter):
        # the first map holds the remaining small scales -> drop it
        if idx == 0:
            continue

        # reapply mask
        band[np.isclose(weights, 0)] = hp.UNSEEN

        wavelet_counts[idx - 1] = CrossVoids.CrossVoids(band, weights, ctx)

    return wavelet_counts

73 

74 

def process(data, ctx, scale_to_unity=False):
    """
    Rearranges raw datavectors: every consecutive group of num_of_scales
    rows (one row per filter band) is flattened into a single output row.

    :param data: 2D array of raw datavectors
    :param ctx: Context instance
    :param scale_to_unity: unused, kept for interface compatibility
    :return: float array with one row per realisation
    """
    # backwards compatibility for data without map std
    if data.shape[1] > ctx['Starlet_steps']:
        data = data[:, :-1]

    num_of_scales = len(ctx['Starlet_scales'])
    n_groups = data.shape[0] // num_of_scales

    # C-order reshape is equivalent to ravel-ing each group of rows;
    # astype matches the float buffer the original code filled
    grouped = data[:n_groups * num_of_scales].reshape(
        n_groups, num_of_scales * data.shape[1])
    return grouped.astype(np.float64)

89 

90 

def slice(ctx):
    """
    Returns the slicing configuration for this statistic.

    :param ctx: Context instance
    :return: tuple (num_of_scales, n_bins_sliced, operation, mult,
        range_mode)
    """
    # number of scales
    num_of_scales = len(ctx['Starlet_scales'])
    # target number of bins per scale
    n_bins_sliced = ctx['Starlet_sliced_bins']
    # mult=1: one datavector per scale; operation='sum': bins are summed;
    # range_mode=True: first/last entries give the boundaries and the
    # binning scheme holds bin edges rather than indices
    return num_of_scales, n_bins_sliced, 'sum', 1, True

107 

108 

def decide_binning_scheme(data, meta, bin, ctx):
    """
    Builds per-scale bin edges and centers for the requested tomographic
    bin. For each scale the edges are equally spaced between the largest
    lower bound and the smallest upper bound seen across realisations.

    :param data: 2D array of processed datavectors
    :param meta: metadata array; column 0 is the tomographic bin id,
        column 1 the number of data rows for that entry
    :param bin: tomographic bin to select
    :param ctx: Context instance
    :return: tuple (bin_edges, bin_centers) of shapes
        (num_of_scales, n_bins_sliced + 1) and
        (num_of_scales, n_bins_sliced)
    """
    num_of_scales = len(ctx['Starlet_scales'])
    n_bins_original = ctx['Starlet_steps']
    n_bins_sliced = ctx['Starlet_sliced_bins']

    # expand the per-entry row counts into a boolean row mask for the
    # requested tomographic bin
    row_mask = np.repeat(meta[:, 0] == bin, meta[:, 1].astype(int))
    selected = data[row_mask, :]

    bin_edges = np.zeros((num_of_scales, n_bins_sliced + 1))
    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    for scale in range(num_of_scales):
        # columns belonging to this smoothing scale
        block = selected[:, n_bins_original * scale:
                         n_bins_original * (scale + 1)]
        # intersect the kappa ranges: largest minimum, smallest maximum
        lo = block[:, 0].max()
        hi = block[:, -1].min()
        edges = np.linspace(lo, hi, n_bins_sliced + 1)
        bin_edges[scale, :] = edges
        bin_centers[scale, :] = edges[:-1] + 0.5 * (edges[1:] - edges[:-1])
    return bin_edges, bin_centers

136 

137 

def filter(ctx):
    """
    Builds a boolean-valued selection mask over the sliced datavector:
    for each scale (in reversed order, matching the data layout) the
    mask carries Starlet_sliced_bins entries, set when the scale is in
    Starlet_selected_scales.

    :param ctx: Context instance
    :return: 1D float array of 0./1. flags
    """
    selected = ctx['Starlet_selected_scales']
    n_bins = ctx['Starlet_sliced_bins']
    pieces = [np.full(n_bins, scale in selected)
              for scale in reversed(ctx['Starlet_scales'])]
    # seed with an empty float array so the result dtype is float,
    # as produced by the original np.append accumulation
    return np.concatenate([np.zeros(0), *pieces])