# Copyright (C) 2020 ETH Zurich, Institute for Particle Physics and Astrophysics
# Author: Raphael Sgier and Jörg Herbel

# construct_lensing_maps.py

# This file contains example functions to compute weak lensing maps using UFalcon

import os
import argparse
import yaml
import numpy as np
import healpy as hp
import h5py
import UFalcon
from astropy.cosmology import FlatLambdaCDM
from astropy import constants as const


def get_single_source_redshifts(zs_str):
    """
    Generates an array with discrete redshift steps used for lightcone construction with single-source redshifts.
    :param zs_str: string containing the redshift range of the lightcone, i.e. 'z_init, z_final, dz'
    :return: numpy array containing redshifts from z_init to z_final with steps of size dz
    """
    z_init, z_final, dz = list(map(float, zs_str.split(',')))
    n_digits_round = int(np.ceil(np.amax(np.fabs(np.log10((z_init, z_final, dz))))))
    zs = np.around(np.arange(z_init, z_final + dz, dz), decimals=n_digits_round)
    return zs
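

# Example (hypothetical values): a redshift grid from 0.1 to 1.5 in steps of 0.1 can be generated with
#     get_single_source_redshifts('0.1,1.5,0.1')
# which should return array([0.1, 0.2, ..., 1.5]).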


def store_output(kappa_maps, single_source_redshifts, paths_out, combine_nz_maps=False):
    """
    Routine to store the computed kappa maps and the derived gamma maps for single-source redshifts and n(z)
    distributions.
    :param kappa_maps: array containing the computed kappa maps, shape (len(lensing_weighters), Npix)
    :param single_source_redshifts: array containing the single-source redshifts
    :param paths_out: paths where the new files are stored
    :param combine_nz_maps: bool: True if all n(z) maps should be stored in a single file, False otherwise
    :return: stores the kappa maps and the corresponding gamma1 and gamma2 maps in a single file or in separate files
    """

    # mean subtraction
    kappa_maps -= np.mean(kappa_maps, axis=1, keepdims=True)

    n_nz = len(kappa_maps) - len(single_source_redshifts)

    # maps from n(z)
    if n_nz > 0:

        gamma1_maps = []
        gamma2_maps = []

        for i in range(n_nz):

            print('Working on n(z) map {} / {}'.format(i + 1, n_nz), flush=True)

            gamma1, gamma2 = UFalcon.utils.kappa_to_gamma(kappa_maps[i])

            if combine_nz_maps:
                gamma1_maps.append(gamma1)
                gamma2_maps.append(gamma2)

            if not combine_nz_maps:
                hp.write_map(filename=paths_out[i],
                             m=(kappa_maps[i], gamma1, gamma2),
                             fits_IDL=False,
                             coord='C',
                             overwrite=True)
                print('Wrote {}'.format(paths_out[i]))

        if combine_nz_maps:
            print('Storing all n(z) maps into a single file')
            nz_maps = np.stack((kappa_maps[:n_nz], gamma1_maps, gamma2_maps), axis=1)
            np.save(paths_out[0], nz_maps)
            print('Wrote {}'.format(paths_out[0]))

    # single-source maps
    if len(single_source_redshifts) > 0:

        kappa_maps_single_source = kappa_maps[n_nz:]

        with h5py.File(paths_out[-1], mode='w') as fh5:

            fh5.create_dataset(name='z', data=single_source_redshifts)

            for name in ('kappa', 'gamma1', 'gamma2'):
                fh5.create_dataset(name=name,
                                   shape=kappa_maps_single_source.shape,
                                   dtype=kappa_maps_single_source.dtype,
                                   compression='lzf')

            for i, kappa_map in enumerate(kappa_maps_single_source):
                print('Storing single-source kappa map {} / {}'.format(i + 1, len(single_source_redshifts)),
                      flush=True)
                gamma1, gamma2 = UFalcon.utils.kappa_to_gamma(kappa_map)
                fh5['kappa'][i] = kappa_map
                fh5['gamma1'][i] = gamma1
                fh5['gamma2'][i] = gamma2
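

# Sketch of reading the single-source output written above back in (the path 'kappa_single_source.h5' is
# hypothetical; the dataset names follow the create_dataset calls in store_output):
#     with h5py.File('kappa_single_source.h5', mode='r') as fh5:
#         zs = fh5['z'][...]           # single-source redshifts
#         kappa_0 = fh5['kappa'][0]    # kappa map for the first single-source redshift
#         gamma1_0 = fh5['gamma1'][0]
#         gamma2_0 = fh5['gamma2'][0]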


def add_shells_h5(paths_shells, lensing_weighters, nside, boxsize, zs_low, zs_up, cosmo, const, n_particles):
    """
    Computes kappa maps from precomputed shells in hdf5-format containing particle counts.
    :param paths_shells: paths to the shell files
    :param lensing_weighters: lensing weighters for continuous n(z) distributions and single-source redshifts,
                              in this order
    :param nside: nside of the output maps
    :param boxsize: size of the box in Gigaparsec
    :param zs_low: lower redshift bounds for the lightcone construction, dict with keys 'min', 'max' and 'delta_z'
    :param zs_up: upper redshift bounds for the lightcone construction, dict with keys 'min', 'max' and 'delta_z'
    :param cosmo: astropy cosmology instance, controls the cosmology used
    :param const: astropy constants module, used for various constants
    :param n_particles: total number of particles used in the N-body simulation, i.e. (No. of part. per side)^3
    :return: kappa maps with shape (len(lensing_weighters), Npix)
    """

    # add up shells
    kappa = np.zeros((len(lensing_weighters), hp.nside2npix(nside)), dtype=np.float32)

    zs_low_arr = np.arange(zs_low['min'], zs_low['max'], zs_low['delta_z'])
    zs_up_arr = np.arange(zs_up['min'], zs_up['max'], zs_up['delta_z'])

    for i_shells, path in enumerate(paths_shells):

        z_low = zs_low_arr[i_shells]
        z_up = zs_up_arr[i_shells]

        print('Processing shells {} / {}, path: {}'.format(i_shells + 1, len(paths_shells), path), flush=True)

        with h5py.File(path, mode='r') as fh5:

            # check if nside is ok
            nside_shells = hp.npix2nside(fh5['shells'].shape[1])
            assert nside <= nside_shells, \
                'Requested nside ({}) is larger than nside ({}) of input shells ' \
                'in file {}'.format(nside, nside_shells, path)

            # select shells inside the redshift range
            z_shells = fh5['z'][...]
            ind_shells = np.where((z_shells[:, 0] >= z_low) & (z_shells[:, 1] <= z_up))[0]

            for c, i_shell in enumerate(ind_shells):

                print('Shell {} / {}'.format(c + 1, len(ind_shells)), flush=True)

                # load shell
                shell = hp.ud_grade(fh5['shells'][i_shell], nside, power=-2).astype(kappa.dtype)
                z_shell_low, z_shell_up = z_shells[i_shell]

                shell *= UFalcon.lensing_weights.kappa_prefactor(n_pix=shell.size,
                                                                 n_particles=n_particles,
                                                                 boxsize=boxsize,
                                                                 cosmo=cosmo)

                # compute lensing weights and add to kappa maps
                for i_w, lensing_weighter in enumerate(lensing_weighters):
                    kappa[i_w] += shell * lensing_weighter(z_shell_low, z_shell_up, cosmo, const)

    return kappa
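

# The hdf5 shell files read above are expected to provide (as implied by the access pattern in add_shells_h5):
#     'shells': dataset of shape (n_shells, n_pix) with particle counts per healpix pixel
#     'z':      dataset of shape (n_shells, 2) with the lower and upper redshift bound of each shell
# A minimal sketch for writing such a file, assuming hypothetical arrays counts and z_bounds:
#     with h5py.File('shells.h5', mode='w') as fh5:
#         fh5.create_dataset(name='shells', data=counts)    # shape (n_shells, hp.nside2npix(nside_shells))
#         fh5.create_dataset(name='z', data=z_bounds)       # shape (n_shells, 2)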


def add_shells_pkdgrav(dirpath, lensing_weighters_cont, single_source_redshifts, nside, cosmo, const, n_particles,
                       boxsize):
    """
    Computes kappa maps from precomputed shells in fits-format containing particle counts.
    :param dirpath: path to the directory containing the fits-files
    :param lensing_weighters_cont: lensing weighters for continuous, user-defined n(z) distributions
    :param single_source_redshifts: array containing the single-source redshifts
    :param nside: nside of the output maps
    :param cosmo: astropy cosmology instance, controls the cosmology used
    :param const: astropy constants module, used for various constants
    :param n_particles: total number of particles used in the N-body simulation, i.e. (No. of part. per side)^3
    :param boxsize: size of the box in Gigaparsec
    :return: kappa maps with shape (len(lensing_weighters), Npix)
    """

    # get all shells
    file_list = list(filter(lambda fn: 'shell_' in fn and os.path.splitext(fn)[1] == '.fits', os.listdir(dirpath)))

    # extract redshifts
    z_shells_low = []
    z_shells_up = []

    for filename in file_list:
        filename_split = os.path.splitext(filename)[0].split('_')
        z_shells_low.append(float(filename_split[-1]))
        z_shells_up.append(float(filename_split[-2]))

    # adjust single-source redshifts to the shell boundaries
    z_shells = np.unique(z_shells_low + z_shells_up)

    for i, zs in enumerate(single_source_redshifts):
        single_source_redshifts[i] = z_shells[np.argmin(np.abs(z_shells - zs))]

    print('Adjusted single-source redshifts to: {}'.format(single_source_redshifts))

    lensing_weighters = lensing_weighters_cont + [UFalcon.lensing_weights.Dirac(zs) for zs in single_source_redshifts]

    # add up shells
    kappa = np.zeros((len(lensing_weighters), hp.nside2npix(nside)), dtype=np.float32)

    for i, filename in enumerate(file_list):

        path = os.path.join(dirpath, filename)
        print('Processing shell {} / {}, path: {}'.format(i + 1, len(file_list), path), flush=True)

        # load shell
        shell = hp.ud_grade(hp.read_map(path), nside, power=-2).astype(kappa.dtype)

        shell *= UFalcon.lensing_weights.kappa_prefactor(n_pix=shell.size,
                                                         n_particles=n_particles,
                                                         boxsize=boxsize,
                                                         cosmo=cosmo)

        # compute lensing weights and add to kappa maps
        for i_w, lensing_weighter in enumerate(lensing_weighters):
            kappa[i_w] += shell * lensing_weighter(z_shells_low[i], z_shells_up[i], cosmo, const)

    return kappa
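

# Note on the filename convention assumed by add_shells_pkdgrav: files must contain 'shell_' and end in '.fits',
# and the last two underscore-separated tokens of the filename stem are parsed as redshifts (the last token is
# used as the lower and the second-to-last as the upper shell redshift), e.g. a hypothetical name of the form
#     shell_<run>_<z_up>_<z_low>.fits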


def main(path_config, paths_shells, nside, paths_nz, single_source_redshifts, paths_out, combine_nz_maps=False):
    """
    Main example function: constructs lensing maps from precomputed shells containing particle counts.
    :param path_config: path to the configuration yaml file
    :param paths_shells: paths to the shells
    :param nside: nside of the output maps
    :param paths_nz: paths to the n(z) files
    :param single_source_redshifts: array containing the single-source redshifts
    :param paths_out: paths to the output files, must contain as many paths as maps are produced
    :param combine_nz_maps: bool: switch to store all n(z) maps in one file
    :return: computes and stores the kappa, gamma1 and gamma2 maps
    """

    print('Config: {}'.format(path_config))
    print('Shells: {}'.format(paths_shells))
    print('n(z): {}'.format(paths_nz))
    print('Single-source redshifts: {}'.format(single_source_redshifts))

    if combine_nz_maps:
        n_paths_out_nz = int(len(paths_nz) > 0)
    else:
        n_paths_out_nz = len(paths_nz)

    n_paths_out_single_source = int(len(single_source_redshifts) > 0)

    assert len(paths_out) == n_paths_out_nz + n_paths_out_single_source, \
        'Number of output paths does not match number of produced maps, should be {} for n(z) maps and {} for ' \
        'single-source maps'.format(n_paths_out_nz, n_paths_out_single_source)

    # load config
    with open(path_config, mode='r') as f:
        config = yaml.safe_load(f)

    # get cosmo instance
    cosmo_params = config.get('cosmology')
    cosmo = FlatLambdaCDM(H0=cosmo_params['H0'], Om0=cosmo_params['Om0'])

    # get continuous lensing weighters
    try:
        z_lim_low = config['z_low']['min']
        z_lim_up = config['z_up']['max']
    except TypeError:
        z_lim_low = config['z_low']
        z_lim_up = config['z_up']

    lensing_weighters = [UFalcon.lensing_weights.Continuous(path_nz,
                                                            z_lim_low=z_lim_low,
                                                            z_lim_up=z_lim_up,
                                                            IA=0.0) for path_nz in paths_nz]

    # check whether the shells come from h5 file(s) or from PKDGRAV
    if len(paths_shells) == 1 and os.path.isdir(paths_shells[0]):
        print('Got one input path which is a directory -- assuming shells stored by PKDGRAV in fits-format')
        kappa = add_shells_pkdgrav(paths_shells[0],
                                   lensing_weighters,
                                   single_source_redshifts,
                                   nside,
                                   cosmo,
                                   const,
                                   config['n_particles'],
                                   config['boxsize'])
    else:
        print('Assuming shells stored in hdf5-format')
        lensing_weighters.extend([UFalcon.lensing_weights.Dirac(zs) for zs in single_source_redshifts])
        kappa = add_shells_h5(paths_shells,
                              lensing_weighters,
                              nside,
                              config['boxsize'],
                              config['z_low'],
                              config['z_up'],
                              cosmo,
                              const,
                              config['n_particles'])

    # store results
    store_output(kappa, single_source_redshifts, paths_out, combine_nz_maps=combine_nz_maps)
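

# Minimal configuration sketch covering the keys accessed in main (values are hypothetical; for hdf5 shells,
# z_low and z_up are dicts with 'min', 'max' and 'delta_z' instead of scalars):
#     cosmology:
#       H0: 70.0
#       Om0: 0.3
#     z_low: 0.0
#     z_up: 1.5
#     n_particles: 2097152   # (128 particles per side)^3
#     boxsize: 2.0           # Gpc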


if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Construct lensing maps', add_help=True)
    parser.add_argument('--path_config', type=str, required=True, help='configuration yaml file')
    parser.add_argument('--paths_shells', type=str, nargs='+', required=True, help='paths of shells')
    parser.add_argument('--nside', type=int, required=True, help='nside of output maps')
    parser.add_argument('--paths_nz', type=str, nargs='+', default=[], help='paths to n(z) files')
    parser.add_argument('--single_source_redshifts', type=str, nargs='+', default=[], help='single-source redshifts')
    parser.add_argument('--paths_out', type=str, required=True, nargs='+',
                        help='paths to output files, must contain as many paths as maps are produced')
    parser.add_argument('--combine_nz_maps', action='store_true', help='switch to store all n(z) maps into one file')
    args = parser.parse_args()

    if len(args.single_source_redshifts) == 1 and ',' in args.single_source_redshifts[0]:
        source_redshifts = get_single_source_redshifts(args.single_source_redshifts[0])
    else:
        source_redshifts = np.array(args.single_source_redshifts, dtype=np.float64)

    main(args.path_config,
         args.paths_shells,
         args.nside,
         args.paths_nz,
         source_redshifts,
         args.paths_out,
         combine_nz_maps=args.combine_nz_maps)
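

# Example invocation (hypothetical paths and values; one output path for the n(z) map plus one for the
# single-source hdf5 file):
#     python construct_lensing_maps.py \
#         --path_config config.yaml \
#         --paths_shells shells_1.h5 shells_2.h5 \
#         --nside 512 \
#         --paths_nz nz_bin1.txt \
#         --single_source_redshifts 0.1,1.5,0.1 \
#         --paths_out kappa_nz_bin1.fits kappa_single_source.h5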