Coverage for estats/stats/CrossStarletVoidsDi.py: 20%


# Copyright (C) 2019 ETH Zurich
# Institute for Particle Physics and Astrophysics
# Author: Dominik Zuercher

import numpy as np
import healpy as hp
from estats.stats import CrossVoids

def context():
    """
    Defines the parameters used by the plugin.
    """
    stat_type = 'convergence-cross'

    required = ['Starlet_steps', 'Starlet_scalesDi',
                'Starlet_selected_scalesDi',
                'void_upper_threshold', 'Starlet_sliced_bins',
                'NSIDE', 'min_count', 'SNR_voids',
                'min_SNR']
    defaults = [1000, [8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096],
                [8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096],
                -2.5, 15, 1024, 30, False, -100.]
    types = ['int', 'list', 'list', 'float', 'int', 'int',
             'int', 'bool', 'float']
    return required, defaults, types, stat_type
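
# The sketch below is not part of the original module: it illustrates how
# the plugin contract returned by context() could be assembled into a ctx
# dictionary of parameter defaults. The helper name _example_build_ctx is
# hypothetical.
def _example_build_ctx():
    required, defaults, types, stat_type = context()
    # pair every required parameter name with its default value
    ctx = dict(zip(required, defaults))
    return ctx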

def CrossStarletVoidsDi(map_w, weights, ctx):
    """
    Performs the starlet-wavelet decomposition of the map and counts the
    voids (local minima) in each filter band.

    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights (integer >= 0)
    :param ctx: Context instance
    :return: Starlet void counts (num filter bands, Starlet_steps + 1)
    """

    try:
        from esd import esd
    except ImportError:
        raise ImportError(
            "Did not find esd package. "
            "It is required for this module to work properly. "
            "Download from: "
            "https://cosmo-gitlab.phys.ethz.ch/cosmo_public/esd")

    # build decomposition
    # (the first map, containing the remaining small scales, is skipped below)
    wavelet_counts = np.zeros((len(ctx['Starlet_scalesDi']),
                               ctx['Starlet_steps'] + 1))

    # count voids in each filter band
    wave_iter = esd.calc_wavelet_decomp_iter(
        map_w, l_bins=ctx['Starlet_scalesDi'])
    counter = 0
    for ii, wmap in enumerate(wave_iter):
        if ii == 0:
            continue
        # reapply mask
        wmap[np.isclose(weights, 0)] = hp.UNSEEN

        void_vals = CrossVoids.CrossVoids(wmap, weights, ctx)
        wavelet_counts[counter] = void_vals
        counter += 1

    return wavelet_counts
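
# Minimal usage sketch, not part of the original module. It assumes the esd
# package is installed and that the CrossVoids plugin can run with the
# default parameters from context(); the helper names are hypothetical.
def _example_count_voids():
    ctx = _example_build_ctx()
    ctx['NSIDE'] = 64  # small resolution to keep the demo cheap
    npix = hp.nside2npix(ctx['NSIDE'])
    map_w = np.random.randn(npix)  # stand-in for a convergence map
    weights = np.ones(npix)        # uniform weights, fully observed sky
    counts = CrossStarletVoidsDi(map_w, weights, ctx)
    # counts has shape (len(ctx['Starlet_scalesDi']), ctx['Starlet_steps'] + 1)
    return counts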

def process(data, ctx, scale_to_unity=False):
    # backwards compatibility for data without map std
    if data.shape[1] > ctx['Starlet_steps']:
        data = data[:, :-1]

    num_of_scales = len(ctx['Starlet_scalesDi'])

    # stack the num_of_scales consecutive rows that belong to one
    # realization into a single flat data vector
    new_data = np.zeros(
        (int(data.shape[0] / num_of_scales),
         data.shape[1] * num_of_scales))
    for jj in range(int(data.shape[0] / num_of_scales)):
        new_data[jj, :] = data[jj * num_of_scales:
                               (jj + 1) * num_of_scales, :].ravel()
    return new_data
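
# Shape illustration for process(), not part of the original module: three
# hypothetical realizations, each contributing num_of_scales rows, are
# flattened into one row per realization.
def _example_process():
    ctx = _example_build_ctx()
    num_of_scales = len(ctx['Starlet_scalesDi'])
    n_real = 3
    data = np.random.rand(n_real * num_of_scales, ctx['Starlet_steps'])
    new_data = process(data, ctx)
    assert new_data.shape == (n_real, num_of_scales * ctx['Starlet_steps'])
    return new_data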

def slice(ctx):
    # number of data vectors for each scale
    mult = 1
    # number of scales
    num_of_scales = len(ctx['Starlet_scalesDi'])
    # either mean or sum, for how to assemble the data into the bins
    operation = 'sum'

    n_bins_sliced = ctx['Starlet_sliced_bins']

    # if True, assumes that the first and last entries of the data vector
    # indicate the lower and upper boundaries and that the binning scheme
    # indicates bin edges rather than their indices
    range_mode = True

    return num_of_scales, n_bins_sliced, operation, mult, range_mode

def decide_binning_scheme(data, meta, bin, ctx):
    num_of_scales = len(ctx['Starlet_scalesDi'])
    n_bins_original = ctx['Starlet_steps']
    n_bins_sliced = ctx['Starlet_sliced_bins']

    # get the correct tomographic bins
    bin_idx = np.zeros(meta.shape[0], dtype=bool)
    bin_idx[np.where(meta[:, 0] == bin)[0]] = True
    bin_idx = np.repeat(bin_idx, meta[:, 1].astype(int))
    data = data[bin_idx, :]

    # get bins for each smoothing scale
    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    bin_edges = np.zeros((num_of_scales, n_bins_sliced + 1))
    for scale in range(num_of_scales):
        # cut out the correct scale; the first and last entries of each
        # data vector hold the minimum and maximum kappa values, so take
        # the tightest range common to all realizations
        data_act = data[:,
                        n_bins_original * scale:n_bins_original * (scale + 1)]
        minimum = np.max(data_act[:, 0])
        maximum = np.min(data_act[:, -1])
        new_kappa_bins = np.linspace(minimum, maximum, n_bins_sliced + 1)
        bin_edges[scale, :] = new_kappa_bins

        bin_centers_act = new_kappa_bins[:-1] + 0.5 * \
            (new_kappa_bins[1:] - new_kappa_bins[:-1])
        bin_centers[scale, :] = bin_centers_act
    return bin_edges, bin_centers
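
# Sketch of the meta/data layout expected by decide_binning_scheme(), not
# part of the original module. Each meta row is assumed to hold a
# tomographic bin id and the number of consecutive data rows in that chunk.
def _example_binning_scheme():
    ctx = _example_build_ctx()
    num_of_scales = len(ctx['Starlet_scalesDi'])
    n_bins_original = ctx['Starlet_steps']
    # two chunks of 5 realizations: tomographic bins 0 and 1
    meta = np.array([[0, 5], [1, 5]])
    # rows sorted so that the first/last entry of every scale block holds
    # the minimum/maximum kappa value, as range_mode=True expects
    data = np.sort(
        np.random.rand(10, num_of_scales * n_bins_original), axis=1)
    bin_edges, bin_centers = decide_binning_scheme(data, meta, 0, ctx)
    assert bin_edges.shape == (num_of_scales, ctx['Starlet_sliced_bins'] + 1)
    return bin_edges, bin_centers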

def filter(ctx):
    filter = np.zeros(0)
    for scale in reversed(ctx['Starlet_scalesDi']):
        if scale in ctx['Starlet_selected_scalesDi']:
            f = np.asarray([True] * ctx['Starlet_sliced_bins'])
        else:
            f = np.asarray([False] * ctx['Starlet_sliced_bins'])
        filter = np.append(filter, f)
    return filter
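
# Sanity sketch for filter(), not part of the original module: with the
# defaults every scale is selected, so the mask keeps all
# len(Starlet_scalesDi) * Starlet_sliced_bins sliced bins.
def _example_filter_mask():
    ctx = _example_build_ctx()
    mask = filter(ctx)
    assert mask.size == (len(ctx['Starlet_scalesDi'])
                         * ctx['Starlet_sliced_bins'])
    assert mask.all()  # all scales selected by default
    return mask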