Coverage for estats/stats/CrossShearCLs.py: 95%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

79 statements  

1# Copyright (C) 2019 ETH Zurich 

2# Institute for Particle Physics and Astrophysics 

3# Author: Dominik Zuercher 

4 

5import numpy as np 

6import healpy as hp 

7 

8 

def context():
    """
    Defines the parameters used by the plugin.

    :return: Tuple (required, defaults, types, stat_type) listing the
             configuration entries consumed by this statistic.
    """
    stat_type = 'shear'

    # (name, default, type) for each configuration entry
    spec = [
        ('lmax', 1000, 'int'),
        ('CrossShearCLs_sliced_bins', 20, 'int'),
        ('CrossShearCLs_min', 100, 'int'),
        ('CrossShearCLs_max', 1000, 'int'),
        ('cl_engine', 'anafast', 'str'),
    ]
    required = [name for name, _, _ in spec]
    defaults = [default for _, default, _ in spec]
    types = [kind for _, _, kind in spec]
    return required, defaults, types, stat_type

20 

21 

def _mask_weighted_shear(shear_pair, weight_map):
    """
    Multiply a (gamma1, gamma2) map pair by per-pixel weights and mark
    invalid pixels (zero weight, or UNSEEN in either component) as UNSEEN.

    :param shear_pair: Sequence of two Healpix maps (first and second
                       shear component).
    :param weight_map: Healpix map of per-pixel weights.
    :return: List of the two weighted maps with invalid pixels set to
             hp.UNSEEN.
    """
    invalid = ~(weight_map > 0)
    invalid |= shear_pair[0] == hp.UNSEEN
    invalid |= shear_pair[1] == hp.UNSEEN

    weighted = []
    for component in shear_pair:
        component_w = (component * weight_map).astype(np.float64)
        component_w[invalid] = hp.UNSEEN
        weighted.append(component_w)
    return weighted


def CrossShearCLs(map_1, map_2, map_1_sec, map_2_sec, weight_map_1,
                  weight_map_2, ctx):
    """
    Calculates cross angular power spectrum of two sets of shear maps.

    :param map_1: A Healpix shear map, first shear component.
    :param map_2: A Healpix shear map, second shear component.
    :param map_1_sec: A second Healpix shear map, first shear component.
    :param map_2_sec: A second Healpix shear map, second shear component.
    :param weight_map_1: A Healpix map with pixel weights
                         for the first shear maps (empty -> uniform weights).
    :param weight_map_2: A Healpix map with pixel weights
                         for the second shear maps (empty -> uniform weights).
    :param ctx: Context dictionary; reads 'lmax' and 'cl_engine'.
    :return: Tuple (Cl_EE, Cl_BB), each of shape (1, lmax).
    :raises Exception: If ctx['cl_engine'] is not a supported engine.
    """
    # anafast uses HealPix polarisation -> negate map_1!
    # not sure if this is also true for polspice
    shear_map_1 = [-map_1, map_2]
    shear_map_2 = [-map_1_sec, map_2_sec]

    lmax = ctx['lmax'] - 1

    # check if weights are set; fall back to uniform weights otherwise
    if len(weight_map_1) == 0:
        weight_map_1 = np.ones_like(map_1)
    if len(weight_map_2) == 0:
        weight_map_2 = np.ones_like(map_1_sec)

    if ctx['cl_engine'] == 'anafast':
        # proper weight masking, identical treatment for both map sets
        shear_map_1_w = _mask_weighted_shear(shear_map_1, weight_map_1)
        shear_map_2_w = _mask_weighted_shear(shear_map_2, weight_map_2)

        # anafast expects (T, Q, U) triples; use a zero temperature map
        dummie_map = np.zeros_like(shear_map_2[0])

        _, cl_e1e2, cl_b1b2, _, _, _ = np.array(
            hp.sphtfunc.anafast(
                (dummie_map, shear_map_1_w[0], shear_map_1_w[1]),
                (dummie_map, shear_map_2_w[0], shear_map_2_w[1]),
                lmax=lmax))
        Cl = {'cl_EE': cl_e1e2, 'cl_BB': cl_b1b2}

    else:
        raise Exception("Unknown cl_engine {}".format(ctx['cl_engine']))

    Cl_EE = Cl['cl_EE'].reshape(1, -1)
    Cl_BB = Cl['cl_BB'].reshape(1, -1)
    return (Cl_EE, Cl_BB)

87 

88 

def process(data, ctx, scale_to_unity=False):
    """
    Post-processes the assembled data vector.

    :param data: Data vector (array-like).
    :param ctx: Context dictionary (unused here).
    :param scale_to_unity: If True, rescale the data by 1e4 in place.
    :return: The (possibly rescaled) data vector.
    """
    if not scale_to_unity:
        return data
    data *= 1e4
    return data

93 

94 

def slice(ctx):
    """
    Describes how the data vector is sliced into bins.

    :param ctx: Context dictionary; reads 'CrossShearCLs_sliced_bins'.
    :return: Tuple (num_of_scales, n_bins_sliced, operation, mult).
    """
    # a single scale, one datavector per scale; entries in a bin are averaged
    return 1, ctx['CrossShearCLs_sliced_bins'], 'mean', 1

106 

107 

def decide_binning_scheme(data, meta, bin, ctx):
    """
    Builds an equal-width binning scheme for the Cls.

    The multipole range [CrossShearCLs_min, CrossShearCLs_max] is divided
    into 'CrossShearCLs_sliced_bins' bins of equal width; any remainder is
    absorbed into the first and last bins. The same scheme is replicated
    for every scale for consistency with the other binning schemes.

    :param data: Unused, kept for interface compatibility.
    :param meta: Unused, kept for interface compatibility.
    :param bin: Unused, kept for interface compatibility.
    :param ctx: Context dictionary; reads 'CrossShearCLs_min',
                'CrossShearCLs_max' and 'CrossShearCLs_sliced_bins'.
    :return: Tuple (bin_edge_indices, bin_centers) with shapes
             (num_of_scales, n_bins_sliced + 1) and
             (num_of_scales, n_bins_sliced).
    """
    num_of_scales = 1
    n_bins_sliced = ctx['CrossShearCLs_sliced_bins']

    lo = ctx['CrossShearCLs_min']
    hi = ctx['CrossShearCLs_max']
    span = hi - lo

    # equal bin width; spread the remainder over the outermost bins
    width = span // n_bins_sliced
    leftover = span % n_bins_sliced
    pad_front = leftover // 2
    pad_back = pad_front + leftover % 2

    # edge positions: widen the first bin by pad_front, the last by pad_back
    edges = np.arange(lo + pad_front, hi - pad_back, width)
    edges[0] = lo
    edges = np.append(edges, hi)

    # bin centers are midpoints of consecutive edges
    centers = (edges[:-1] + edges[1:]) / 2.0

    # assign the identical scheme to every scale
    bin_edge_indices = np.tile(edges, (num_of_scales, 1)).astype(float)
    bin_centers = np.tile(centers, (num_of_scales, 1))

    return bin_edge_indices, bin_centers

147 

148 

def filter(ctx):
    """
    Selects which entries of the sliced data vector to keep (all of them).

    :param ctx: Context dictionary; reads 'CrossShearCLs_sliced_bins'.
    :return: List of booleans, one per sliced bin, all True.
    """
    return [True for _ in range(ctx['CrossShearCLs_sliced_bins'])]