Coverage for estats/stats/CrossPeaks.py: 91%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

103 statements  

1# Copyright (C) 2019 ETH Zurich 

2# Institute for Particle Physics and Astrophysics 

3# Author: Dominik Zuercher 

4 

5import numpy as np 

6import healpy as hp 

7 

8 

def context():
    """
    Defines the parameters used by the plugin.

    :return: tuple (required, defaults, types, stat_type) describing the
        configuration entries consumed by the CrossPeaks plugin.
    """
    # (name, default, type) — unzipped below into the three parallel lists
    spec = [
        ('CrossPeaks_steps', 1000, 'int'),
        ('peak_lower_threshold', 2.5, 'float'),
        ('CrossPeaks_sliced_bins', 15, 'int'),
        ('NSIDE', 1024, 'int'),
        ('scales',
         [31.6, 29.0, 26.4, 23.7, 21.1, 18.5, 15.8, 13.2,
          10.5, 7.9, 5.3, 2.6], 'list'),
        ('selected_scales',
         [31.6, 29.0, 26.4, 23.7, 21.1, 18.5,
          15.8, 13.2, 10.5], 'list'),
        ('min_count', 30, 'int'),
        ('SNR_peaks', False, 'bool'),
        ('max_SNR', 100., 'float'),
    ]
    required = [name for name, _, _ in spec]
    defaults = [default for _, default, _ in spec]
    types = [kind for _, _, kind in spec]
    return required, defaults, types, 'multi'

26 

27 

def CrossPeaks(map_w, weights, ctx):
    """
    Calculates the peak abundance function of a convergence map.

    A peak is a pixel whose value is strictly larger than all 8 of its
    Healpix neighbours. Pixels adjacent to a masked (UNSEEN) pixel are
    excluded; the very last pixel of the map is handled separately
    because it is temporarily masked during the neighbour scan.

    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights
    :param ctx: Context instance
    :return: Peak abundance function: histogram of peak heights with the
        range minimum, maximum and the map standard deviation appended,
        shape (1, CrossPeaks_steps + 1). If no peaks survive the cuts a
        1D array of NaNs of length CrossPeaks_steps + 1 is returned.
    """

    # standard deviation of map_w, ignoring masked (UNSEEN) pixels
    sig = np.std(map_w[map_w > hp.pixelfunc.UNSEEN])
    ell = map_w.size

    nside = hp.get_nside(map_w)

    # Exclude last element; it gets a dedicated treatment below
    ran = np.arange(ell - 1)

    # restrict to mask
    ran = ran[weights[:-1] > 0.0]

    # temporarily mask the last pixel so it cannot win a comparison
    temp = map_w[-1]
    map_w[-1] = hp.pixelfunc.UNSEEN

    peaks = np.zeros(0, dtype=np.int32)

    # calculate all the neighbours, chunked (slow but slim memory)
    num_chunks = 1
    for r in np.array_split(ran, num_chunks):
        neighbours = hp.get_all_neighbours(nside, r)

        edges = np.any(map_w[neighbours] == hp.pixelfunc.UNSEEN, axis=0)

        # Get Peak positions (maxima)
        ps = np.all(map_w[neighbours] < map_w[r], axis=0)

        # Remove pixels which are next to an UNSEEN pixel
        ps = np.logical_and(ps, np.logical_not(edges))

        peaks = np.append(peaks, r[ps])

    # Last Value treatment: restore the last pixel and re-evaluate it and
    # its neighbourhood, since masking it may have created spurious peaks
    map_w[-1] = temp
    n0 = hp.get_all_neighbours(nside, ell - 1)
    if (np.all(map_w[n0] < map_w[ell - 1])):
        peaks = np.append(peaks, ell - 1)
    n1 = np.reshape(hp.get_all_neighbours(nside, n0).T, (-1))
    peaks2 = np.all(np.reshape(map_w[n1], (-1, 8))
                    < map_w[n0].reshape((-1, 1)), axis=1)
    peaks2 = n0[peaks2]
    # reconcile peaks found with the masked pixel against the re-check
    peaks = _select(n1, peaks, peaks2)

    # get peak heights
    peak_vals = map_w[peaks]

    if ctx['SNR_peaks']:
        sig = np.std(map_w[map_w > hp.pixelfunc.UNSEEN])
        peak_vals /= sig
        # NOTE(review): in this branch the max_SNR cut below divides by
        # sig a second time, i.e. it is applied to SNR/sig rather than
        # SNR — looks unintended, confirm against the reference Peaks
        # implementation before changing.

    # restrict to peaks with SNR <= max_SNR
    peak_vals = peak_vals[peak_vals / sig <= ctx['max_SNR']]

    # edge case (nothing found)
    if len(peak_vals) == 0:
        # bugfix: the context key declared by context() is
        # 'CrossPeaks_steps'; 'Peaks_steps' raised a KeyError here
        return np.full(ctx['CrossPeaks_steps'] + 1, np.nan)

    # Binning for values: demand at least min_count peaks below/above the
    # range ends, otherwise fall back to the full min/max range
    if len(peak_vals) > (ctx['min_count'] * 200.):
        minimum = np.max(np.partition(
            peak_vals, ctx['min_count'])[:ctx['min_count']])
        maximum = np.min(np.partition(
            peak_vals, -ctx['min_count'])[-ctx['min_count']:])
    else:
        maximum = -1.
        minimum = 1.
    if maximum < minimum:
        minimum = np.min(peak_vals)
        maximum = np.max(peak_vals)
    # bugfix: same wrong key 'Peaks_steps' -> 'CrossPeaks_steps'
    peak_bins = np.linspace(
        minimum, maximum, ctx['CrossPeaks_steps'] - 1)

    peak_vals = np.histogram(peak_vals, bins=peak_bins)[0]

    # first and last bins indicate maximum and
    # minimum of the kappa range and std
    peak_vals = np.hstack((minimum, peak_vals, maximum, np.asarray([sig])))
    peak_vals = peak_vals.reshape(1, peak_vals.size)
    return peak_vals

115 

116 

117def _select(n, p1, p2): 

118 for i in n: 

119 if ((i in p1) and (i not in p2)): 

120 p1 = p1[p1 != i] 

121 if ((i in p2) and (i not in p1)): 

122 p1 = np.append(p1, i) 

123 return p1 

124 

125 

def process(data, ctx, scale_to_unity=False):
    """
    Groups the rows belonging to the different smoothing scales of each
    realisation into one long row per realisation.

    :param data: 2D array with num_of_scales consecutive rows per
        realisation
    :param ctx: Context instance
    :param scale_to_unity: unused; kept for interface compatibility
    :return: 2D float array, one concatenated row per realisation
    """
    # backwards compatibility for data without map std
    if data.shape[1] > ctx['CrossPeaks_steps']:
        data = data[:, :-1]

    num_of_scales = len(ctx['scales'])
    n_real = data.shape[0] // num_of_scales

    # lay the scale rows of each realisation side by side; any trailing
    # incomplete group of rows is dropped, as in the loop formulation
    grouped = data[:n_real * num_of_scales].reshape(
        n_real, num_of_scales * data.shape[1])
    return grouped.astype(np.float64)

140 

141 

def slice(ctx):
    """
    Returns the slicing configuration of the CrossPeaks statistic.

    :param ctx: Context instance
    :return: tuple (num_of_scales, n_bins_sliced, operation, mult,
        range_mode)
    """
    # one datavector per scale (mult=1), combined into bins by summation
    num_of_scales = len(ctx['scales'])
    n_bins_sliced = ctx['CrossPeaks_sliced_bins']

    # range_mode=True: the first and last entries of each data vector
    # carry the kappa range boundaries and the binning scheme holds bin
    # edges rather than bin indices
    return num_of_scales, n_bins_sliced, 'sum', 1, True

158 

159 

def decide_binning_scheme(data, meta, bin, ctx):
    """
    Derives kappa bin edges and centers per smoothing scale for one
    tomographic bin.

    :param data: 2D array of data vectors (one row per map)
    :param meta: 2D array; column 0 holds the tomographic bin label of
        each entry, column 1 the number of consecutive data rows it owns
    :param bin: tomographic bin to select
    :param ctx: Context instance
    :return: (bin_edges, bin_centers) with shapes
        (num_of_scales, n_bins_sliced + 1) and
        (num_of_scales, n_bins_sliced)
    """
    num_of_scales = len(ctx['scales'])
    n_bins_original = ctx['CrossPeaks_steps']
    n_bins_sliced = ctx['CrossPeaks_sliced_bins']

    # expand the per-entry row counts into a boolean row mask selecting
    # the requested tomographic bin
    row_mask = np.repeat(meta[:, 0] == bin, meta[:, 1].astype(int))
    data = data[row_mask, :]

    # one set of bins per smoothing scale
    bin_edges = np.zeros((num_of_scales, n_bins_sliced + 1))
    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    for idx in range(num_of_scales):
        # the first/last column of each scale's slice hold the per-map
        # minimum and maximum kappa values
        block = data[:, n_bins_original * idx:n_bins_original * (idx + 1)]
        lo = np.max(block[:, 0])
        hi = np.min(block[:, -1])
        edges = np.linspace(lo, hi, n_bins_sliced + 1)
        bin_edges[idx, :] = edges
        bin_centers[idx, :] = edges[:-1] + 0.5 * (edges[1:] - edges[:-1])
    return bin_edges, bin_centers

187 

188 

def filter(ctx):
    """
    Builds a 0/1-valued mask selecting the sliced-data-vector entries
    that belong to the scales listed in ctx['selected_scales'].

    :param ctx: Context instance
    :return: 1D float array with CrossPeaks_sliced_bins entries per scale
    """
    n_bins = ctx['CrossPeaks_sliced_bins']
    selected = ctx['selected_scales']
    mask = np.zeros(0)
    for scale in ctx['scales']:
        # every bin of a scale is either kept or dropped as a whole
        keep = scale in selected
        mask = np.append(mask, np.full(n_bins, keep))
    return mask