Coverage for estats/stats/CrossStarletMinkowskiDi.py: 19%

72 statements  

# Copyright (C) 2019 ETH Zurich
# Institute for Particle Physics and Astrophysics
# Author: Dominik Zuercher

import numpy as np
import healpy as hp
from estats.stats import CrossMinkowski

def context():
    """
    Defines the parameters used by the plugin.
    """
    stat_type = 'convergence-cross'

    required = ['Minkowski_max', 'Minkowski_min', 'Minkowski_steps',
                'Minkowski_sliced_bins', 'Starlet_scalesDi',
                'Starlet_selected_scalesDi',
                'NSIDE', 'no_V0']
    defaults = [4.0, -4.0, 10, 10, [8, 16, 32, 64,
                                    128, 256, 512, 1024, 2048, 4096],
                [8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096],
                1024, False]
    types = ['float', 'float', 'int', 'int', 'list', 'list', 'int', 'bool']
    return required, defaults, types, stat_type
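How these three lists turn into the ctx dictionary used below is up to the estats frontend; a minimal sketch, assuming the core simply pairs each required name with its default (the real construction and validation may differ):

# Illustrative sketch, not part of the module.
from estats.stats import CrossStarletMinkowskiDi as csm

required, defaults, types, stat_type = csm.context()
ctx = dict(zip(required, defaults))
# e.g. ctx['Minkowski_steps'] -> 10, ctx['NSIDE'] -> 1024, ctx['no_V0'] -> False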

def CrossStarletMinkowskiDi(map_w, weights, ctx):
    """
    Performs the starlet-wavelet decomposition of the map and calculates the
    Minkowski functionals in each filter band.
    :param map_w: A Healpix convergence map
    :param weights: A Healpix map with pixel weights (integer >= 0)
    :param ctx: Context instance
    :return: Minkowski functionals, shape
             (number of filter bands, Minkowski_steps * 3)
    """

    try:
        from esd import esd
    except ImportError:
        raise ImportError(
            "Did not find esd package. "
            "It is required for this module to work properly. "
            "Download from: "
            "https://cosmo-gitlab.phys.ethz.ch/cosmo_public/esd")

    wavelet_counts = np.zeros((len(ctx['Starlet_scalesDi']),
                               ctx['Minkowski_steps'] * 3))

    # calculate the Minkowski functionals in each filter band
    wave_iter = esd.calc_wavelet_decomp_iter(
        map_w, l_bins=ctx['Starlet_scalesDi'])
    counter = 0
    for ii, wmap in enumerate(wave_iter):
        # skip the first map yielded by the decomposition iterator
        if ii == 0:
            continue
        # reapply mask
        wmap[np.isclose(weights, 0)] = hp.UNSEEN

        # calc Minkowski functionals
        minks = CrossMinkowski.CrossMinkowski(wmap, weights, ctx)
        wavelet_counts[counter] = minks
        counter += 1

    return wavelet_counts
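A usage sketch, assuming the optional esd package is installed and that the defaults above are sufficient for CrossMinkowski; the map and weights are placeholders, not a realistic convergence field:

# Illustrative sketch, not part of the module.
import numpy as np
import healpy as hp
from estats.stats import CrossStarletMinkowskiDi as csm

required, defaults, _, _ = csm.context()
ctx = dict(zip(required, defaults))
npix = hp.nside2npix(ctx['NSIDE'])
map_w = np.random.randn(npix)   # placeholder convergence map
weights = np.ones(npix)         # all pixels observed
minks = csm.CrossStarletMinkowskiDi(map_w, weights, ctx)
# minks.shape == (len(ctx['Starlet_scalesDi']), ctx['Minkowski_steps'] * 3)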

def process(data, ctx, scale_to_unity=False):
    num_of_scales = len(ctx['Starlet_scalesDi'])

    new_data = np.zeros(
        (int(data.shape[0] / num_of_scales), data.shape[1]
         * num_of_scales))
    for jj in range(int(data.shape[0] / num_of_scales)):
        new_data[jj, :] = data[jj * num_of_scales:
                               (jj + 1) * num_of_scales, :].ravel()
    return new_data
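process() regroups the raw statistic: each consecutive block of num_of_scales rows (one row per wavelet scale) is flattened into a single row per realization. A small worked example with hypothetical numbers:

# Illustrative sketch, not part of the module.
import numpy as np
from estats.stats.CrossStarletMinkowskiDi import process

ctx = {'Starlet_scalesDi': [8, 16]}        # 2 scales (hypothetical)
data = np.arange(4 * 3).reshape(4, 3)      # 2 realizations x 2 scales, 3 values each
out = process(data, ctx)
# out.shape == (2, 6); row 0 is data[0:2].ravel(), row 1 is data[2:4].ravel()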

def slice(ctx):
    # number of datavectors for each scale
    mult = 3
    # number of scales
    num_of_scales = len(ctx['Starlet_scalesDi'])
    # either mean or sum, for how to assemble the data into the bins
    operation = 'mean'

    n_bins_sliced = ctx['Minkowski_sliced_bins']

    return num_of_scales, n_bins_sliced, operation, mult
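With the defaults from context() the slicing layout looks as follows; the three datavectors per scale are presumably the three Minkowski functionals (compare the no_V0 handling in filter() below):

# Illustrative sketch, not part of the module.
from estats.stats import CrossStarletMinkowskiDi as csm

required, defaults, _, _ = csm.context()
ctx = dict(zip(required, defaults))
num_of_scales, n_bins_sliced, operation, mult = csm.slice(ctx)
# (10, 10, 'mean', 3): 10 wavelet scales, 10 sliced bins per datavector,
# bins combined by averaging, 3 datavectors per scale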

def decide_binning_scheme(data, meta, bin, ctx):
    # For Minkowski perform simple equal bin width splitting.
    # Same splitting for each smoothing scale.
    range_edges = [ctx['Minkowski_min'], ctx['Minkowski_max']]
    n_bins_original = ctx['Minkowski_steps']
    num_of_scales = len(ctx['Starlet_scalesDi'])
    n_bins_sliced = ctx['Minkowski_sliced_bins']
    bin_centers = np.zeros((num_of_scales, n_bins_sliced))
    bin_edge_indices = np.zeros((num_of_scales, n_bins_sliced + 1))

    orig_bin_values = np.linspace(
        range_edges[0], range_edges[1], n_bins_original)

    per_bin = n_bins_original // n_bins_sliced
    remain = n_bins_original % n_bins_sliced
    remain_front = remain // 2
    remain_back = remain_front + remain % 2

    # Get edge indices
    bin_edge_indices_temp = np.arange(
        remain_front, n_bins_original - remain_back, per_bin)
    bin_edge_indices_temp[0] -= remain_front
    bin_edge_indices_temp = np.append(
        bin_edge_indices_temp, n_bins_original)

    # Get bin central values
    bin_centers_temp = np.zeros(0)
    for jj in range(len(bin_edge_indices_temp) - 1):
        bin_centers_temp = np.append(bin_centers_temp, np.nanmean(
            orig_bin_values[bin_edge_indices_temp[jj]:
                            bin_edge_indices_temp[jj + 1]]))

    # Assign splitting to each scale
    for scale in range(num_of_scales):
        bin_centers[scale, :] = bin_centers_temp
        bin_edge_indices[scale, :] = bin_edge_indices_temp

    return bin_edge_indices, bin_centers
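decide_binning_scheme() distributes the Minkowski_steps original threshold bins as evenly as possible over Minkowski_sliced_bins coarser bins and absorbs any remainder into the outermost bins (with the shipped defaults, 10 into 10, the split is trivial). A worked example with a hypothetical number of sliced bins:

# Illustrative sketch, not part of the module.
from estats.stats import CrossStarletMinkowskiDi as csm

required, defaults, _, _ = csm.context()
ctx = dict(zip(required, defaults))
ctx['Minkowski_sliced_bins'] = 4   # hypothetical, default is 10
edges, centers = csm.decide_binning_scheme(None, None, None, ctx)
# edges[0] == [0., 3., 5., 7., 10.]: the 4 sliced bins hold 3, 2, 2 and 3
# of the 10 original bins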

def filter(ctx):
    filter = np.zeros(0)
    for scale in reversed(ctx['Starlet_scalesDi']):
        if scale in ctx['Starlet_selected_scalesDi']:
            f = [True] * ctx['Minkowski_sliced_bins']
            f = np.asarray(f)
        else:
            f = [False] * ctx['Minkowski_sliced_bins']
            f = np.asarray(f)

        f = np.tile(f, 3)
        if ctx['no_V0']:
            f[:ctx['Minkowski_sliced_bins']] = False
        filter = np.append(filter, f)
    return filter
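filter() returns one keep/drop flag (1/0) per element of the processed data vector: Minkowski_sliced_bins * 3 entries per scale, iterating over reversed(Starlet_scalesDi); scales not listed in Starlet_selected_scalesDi are masked entirely, and no_V0 additionally drops the first block of every scale. A sketch with the defaults:

# Illustrative sketch, not part of the module.
from estats.stats import CrossStarletMinkowskiDi as csm

required, defaults, _, _ = csm.context()
ctx = dict(zip(required, defaults))
ctx['no_V0'] = True   # hypothetical: drop the first functional block per scale
mask = csm.filter(ctx)
# len(mask) == len(ctx['Starlet_scalesDi']) * ctx['Minkowski_sliced_bins'] * 3
# and within each scale block the first Minkowski_sliced_bins entries are 0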