From f1293b8ac67571ae03aa9572fd55c9e11824933e Mon Sep 17 00:00:00 2001 From: Tuomo Salmi Date: Thu, 7 Dec 2023 18:09:01 +0100 Subject: [PATCH] Example of polarized atmosphere data file creation and small testing. --- .../TestRun_PolNum_split.py | 18 +-- .../ixpeobssim/config/model_amsp_xpsi.py | 11 +- .../produce_5D_atmosphere_data.py | 146 ++++++++++++++++++ ...atorIQU_for_azimuthal_invariance_split.pyx | 7 + 4 files changed, 161 insertions(+), 21 deletions(-) create mode 100644 examples/produce_atmos_lookuptable/produce_5D_atmosphere_data.py diff --git a/examples/examples_modeling_tutorial/TestRun_PolNum_split.py b/examples/examples_modeling_tutorial/TestRun_PolNum_split.py index 6e0cbbe8..7702a0a2 100644 --- a/examples/examples_modeling_tutorial/TestRun_PolNum_split.py +++ b/examples/examples_modeling_tutorial/TestRun_PolNum_split.py @@ -228,9 +228,9 @@ def _HotRegion__compute_cellParamVecs(self): omit=False, cede=False, concentric=False, - sqrt_num_cells=32, + sqrt_num_cells=32, #100 min_sqrt_num_cells=10, - max_sqrt_num_cells=64, + max_sqrt_num_cells=64, #100 num_leaves=100, num_rays=200, split=True, @@ -291,9 +291,7 @@ def hot_atmosphere(self, path): with np.load(path, allow_pickle=True) as data_dictionary: NSX = data_dictionary['NSX.npy'] size_reorderme = data_dictionary['size.npy'] - #print(size_reorderme) - #size = (150, 9, 31, 11, 41) size = [size_reorderme[3], size_reorderme[4], size_reorderme[2], size_reorderme[1], size_reorderme[0]] Energy = np.ascontiguousarray(NSX[0:size[0],0]) @@ -310,9 +308,7 @@ def hot_atmosphere_Q(self, path): with np.load(path, allow_pickle=True) as data_dictionary: NSX = data_dictionary['NSX.npy'] size_reorderme = data_dictionary['size.npy'] - #print(size_reorderme) - #size = (150, 9, 31, 11, 41) size = [size_reorderme[3], size_reorderme[4], size_reorderme[2], size_reorderme[1], size_reorderme[0]] Energy = np.ascontiguousarray(NSX[0:size[0],0]) @@ -322,15 +318,15 @@ def hot_atmosphere_Q(self, path): t_e = np.ascontiguousarray([NSX[i*size[0]*size[1]*size[2]*size[3],4] for i in range(size[4])]) intensities = np.ascontiguousarray(NSX[:,5]) - self._hot_atmosphere_Q = (t_e, t_bb, tau, cos_zenith, Energy, 0.1*intensities*(-1.0)) + #self._hot_atmosphere_Q = (t_e, t_bb, tau, cos_zenith, Energy, 0.1*intensities*(-1.0)) + self._hot_atmosphere_Q = (t_e, t_bb, tau, cos_zenith, Energy, intensities) photosphere = CustomPhotosphere_NumA5(hot = hot, elsewhere = elsewhere, stokes=True, values=dict(mode_frequency = spacetime['frequency'])) -photosphere.hot_atmosphere = '/home/tuomo/xpsi/xpsi_bas/input_files/Bobrikova_compton_slab.npz' -#Replace the following file later with the correct one. -#Let's now just pretend that Q data is the same as I data times -0.1. 
-photosphere.hot_atmosphere_Q = '/home/tuomo/xpsi/xpsi_bas/input_files/Bobrikova_compton_slab.npz'
+this_directory = os.path.dirname(os.path.abspath(__file__))
+photosphere.hot_atmosphere = this_directory+'/model_data/Bobrikova_compton_slab_I.npz'
+photosphere.hot_atmosphere_Q = this_directory+'/model_data/Bobrikova_compton_slab_Q.npz'
 
 photosphere['mode_frequency'] == spacetime['frequency']
 
diff --git a/examples/examples_modeling_tutorial/ixpeobssim/config/model_amsp_xpsi.py b/examples/examples_modeling_tutorial/ixpeobssim/config/model_amsp_xpsi.py
index 1aba2c32..b28fcc61 100644
--- a/examples/examples_modeling_tutorial/ixpeobssim/config/model_amsp_xpsi.py
+++ b/examples/examples_modeling_tutorial/ixpeobssim/config/model_amsp_xpsi.py
@@ -69,15 +69,6 @@
     for i in range(NEnergy):
         Imod[j,i],Qmod[j,i],Umod[j,i]=photosphere_I[i,j], photosphere_Q[i,j], photosphere_U[i,j]
 
-
-
-#PA_check = arctan2(-Umod,-Qmod)*90/pi+90
-#print(PA_check[:,200])
-#plt.figure()
-#plt.plot(phase, PA_check[:,200], label='Energy = %.2f keV' % energies[200])
-#setup_gca(ymin=-90., ymax=180.0, legend=True, **fmtaxis.pp_pol_ang)
-#plt.savefig("test.png")
-
 chi = 0.0 #pulsar rotation axis position angle
 chi_rad = chi*pi/180.0
 
@@ -210,7 +201,7 @@ def display(emin=1., emax=12.):
     # Pulse profile: Flux.
     plt.figure('%s pulse profile' % __model__)
     for E in [2., 5., 8.]:
-        print("E,spec",E,spec(E, phase))
+        #print("E,spec",E,spec(E, phase))
         plt.plot(phase, spec(E, phase), label='Energy = %.2f keV' % E)
     setup_gca(ymin=0., legend=True, **fmtaxis.spec)
 
diff --git a/examples/produce_atmos_lookuptable/produce_5D_atmosphere_data.py b/examples/produce_atmos_lookuptable/produce_5D_atmosphere_data.py
new file mode 100644
index 00000000..e5346045
--- /dev/null
+++ b/examples/produce_atmos_lookuptable/produce_5D_atmosphere_data.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Nov 16 10:11:26 2022
+
+@author: bas
+
+Modified by T. Salmi on Dec 7 2023 to also save the Stokes Q parameter.
+
+This script can be used to convert the atmosphere data files from
+https://github.com/AnnaBobrikova/ComptonSlabTables
+to a format readable by X-PSI.
+"""
+
+from numpy import linspace, logspace, empty, zeros, ones, array, fromfile
+from numpy import pi, exp, log, sqrt, sin, cos, arccos, arctan2
+from numpy import absolute, sign, floor, ceil, argmin
+from numpy.polynomial.laguerre import laggauss
+from numpy.polynomial.legendre import leggauss
+from scipy.interpolate import interp1d
+from scipy.interpolate import CubicSpline
+from scipy.interpolate import interp2d
+from scipy.special import kn
+from bisect import bisect
+import numpy as np
+from astropy.io import fits
+from numpy import ones, zeros
+from scipy import interpolate
+
+t__e = np.arange(40.0, 202.0, 4.0) #actual range is 40-200 in the table's dimensionless units, i.e. ~20-100 keV (the stored value is Te(keV)*1000/511)
+t__bb = np.arange(0.001, 0.0031, 0.0002) #this range is partly non-physical: we went for way too low Tbbs here, and results from too small Tbbs will most probably be deleted. This is Tbb(keV)/511keV, so these correspond to 0.07 - 1.5 keV, but our calculations don't work correctly for Tbb << 0.5 keV
+tau__t = np.arange(0.5, 3.55, 0.1)
+
+NEnergy_i = 150
+NZenith_i = 9
+
+I_mighty = ones((len(t__e), len(t__bb), len(tau__t), NEnergy_i, NZenith_i)) #5D intensity tensor [Te, Tbb, tau_t, Energy, Mu]
+Q_mighty = ones((len(t__e), len(t__bb), len(tau__t), NEnergy_i, NZenith_i)) #5D Q-parameter tensor [Te, Tbb, tau_t, Energy, Mu]
+
+
+
+def reading_table(): #routine that reads the FITS tables into I_mighty and Q_mighty
+    path='/home/tuomo/polcslab/ixpe_sim/Bobrikova/ComptonSlabTables/'
+    global I_mighty
+    global Q_mighty
+    p=0 #since we loop over the values in the t__e array rather than over an index, we need a separate counter for the Te index
+    for i in t__e:
+        hdu = fits.open(path+'CompSlab_%s.fits' %(i), mode='update')
+        print('Reading CompSlab_%s.fits' %(i)) #progress indicator while the files are being opened
+        for ii in range(0,len(t__bb)): #Tbb
+            for iii in range(0,len(tau__t)): #tau_t
+                science_data = hdu[(iii)*len(t__bb)+ii+1].data #this index selects the correct table (HDU) within the FITS file
+                data = np.transpose(science_data.field(1)) #transpose needed because the tables were generated in Julia and are read in Python, and the two languages use a different row/column ordering
+                data2 = np.transpose(science_data.field(0))
+                for kk in range(0, NEnergy_i): #energy
+                    for kkk in range(0,NZenith_i): #zenith angles
+                        I_mighty[p, ii, iii, kk, kkk] = data[kk, kkk]
+                        Q_mighty[p, ii, iii, kk, kkk] = data2[kk, kkk]
+        p +=1
+
+
+reading_table()
+
+
+x_l, x_u = -3.7, .3 # lower and upper bounds of the log_10 energy span
+NEnergy = 150 # 50# 101 # number of energy points (x)
+IntEnergy = np.logspace(x_l,x_u,NEnergy), np.log(1e1)*(x_u-x_l)/(NEnergy-1.) # sample points and weights for integrations over the spectrum when computing the source function
+Energy,x_weight=IntEnergy
+
+from numpy.polynomial.legendre import leggauss
+
+def init_mu(n = 3):
+    NMu = n # number of propagation zenith angle cosines (\mu) [0,1]
+    NZenith = 2*NMu # number of propagation zenith angles (z) [0,pi]
+    mu = np.empty(NZenith)
+    m2,mw = leggauss(NMu)
+    mu[:NMu] = (m2 - 1.)/2
+    mu[NMu:NZenith] = (m2 + 1.)/2
+    return mu[NMu:NZenith]
+
+mu = init_mu(9)
+
+
+# I(E < mu < tau < tbb < te): Energy runs fastest and t_e slowest in the flattened grid (columns: E, mu, tau_t, t_bb, t_e, I)
+
+INDEX = 0
+intensities_size = 1
+shapes = list(I_mighty.shape)
+for shape in shapes:
+    intensities_size *= shape
+
+NSX = np.empty((intensities_size,6))
+
+print(len(t__e))
+print(len(t__bb))
+print(len(tau__t))
+print(len(mu))
+print(len(Energy))
+
+for m in range(len(t__e)):
+    for l in range(len(t__bb)):
+        for k in range(len(tau__t)):
+            for j in range(len(mu)):
+                for i in range(len(Energy)):
+                    NSX[INDEX,0] = Energy[i]
+                    NSX[INDEX,1] = mu[j]
+                    NSX[INDEX,2] = tau__t[k]
+                    NSX[INDEX,3] = t__bb[l]
+                    NSX[INDEX,4] = t__e[m]
+                    NSX[INDEX,5] = I_mighty[m,l,k,i,j]
+                    INDEX += 1
+path_save='../examples_modeling_tutorial/model_data/'
+np.savez_compressed(path_save+'Bobrikova_compton_slab_I.npz', NSX=NSX, size=shapes)
+
+
+# Q(E < mu < tau < tbb < te): same grid and ordering, with Stokes Q in the last column
+
+INDEX = 0
+intensities_size = 1
+shapes = list(I_mighty.shape)
+for shape in shapes:
+    intensities_size *= shape
+
+NSX = np.empty((intensities_size,6))
+
+print(len(t__e))
+print(len(t__bb))
+print(len(tau__t))
+print(len(mu))
+print(len(Energy))
+
+for m in range(len(t__e)):
+    for l in range(len(t__bb)):
+        for k in range(len(tau__t)):
+            for j in range(len(mu)):
+                for i in range(len(Energy)):
+                    NSX[INDEX,0] = Energy[i]
+                    NSX[INDEX,1] = mu[j]
+                    NSX[INDEX,2] = tau__t[k]
+                    NSX[INDEX,3] = t__bb[l]
+                    NSX[INDEX,4] = t__e[m]
+                    NSX[INDEX,5] = Q_mighty[m,l,k,i,j]
+                    INDEX += 1
+np.savez_compressed(path_save+'Bobrikova_compton_slab_Q.npz', NSX=NSX, size=shapes)
+
+
diff --git a/xpsi/cellmesh/integratorIQU_for_azimuthal_invariance_split.pyx b/xpsi/cellmesh/integratorIQU_for_azimuthal_invariance_split.pyx
index 31934ea3..3860a367 100644
--- a/xpsi/cellmesh/integratorIQU_for_azimuthal_invariance_split.pyx
+++ b/xpsi/cellmesh/integratorIQU_for_azimuthal_invariance_split.pyx
@@ -524,6 +524,13 @@ def integrate(size_t numThreads,
                     cos_2chi = cos(2*chi)
                     sin_2chi = sin(2*chi)
 
+                    #if(ks == 0):
+                    #    printf("leaves[_kdx] = %.6e ",leaves[_kdx]/_2pi)
+                    #    printf("chi_0 = %.6e ",chi_0)
+                    #    printf("chi_1 = %.6e ",chi_1)
+                    #    printf("chi_prime = %.6e ",chi_prime)
+                    #    printf("PA_tot = %.6e\n",chi)
+
                     # printf(".%.8e", count_evals)
                     # count_evals = 1. + count_evals
                     # printf(".%ld",J)
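
For reference, a minimal sanity check of the lookup-table layout produced by produce_5D_atmosphere_data.py. This is a sketch outside the patch, not part of X-PSI: the file path below is an assumption (adjust it to wherever the script wrote the .npz files), only numpy is used, and the variable names are illustrative.

import numpy as np

# Assumed output location; adjust to where produce_5D_atmosphere_data.py wrote the file.
path = 'examples/examples_modeling_tutorial/model_data/Bobrikova_compton_slab_I.npz'

with np.load(path, allow_pickle=True) as data:
    NSX = data['NSX']    # flattened grid, columns: E, mu, tau_t, t_bb, t_e, intensity
    size = data['size']  # saved as I_mighty.shape = (n_te, n_tbb, n_tau, n_E, n_mu)

n_te, n_tbb, n_tau, n_E, n_mu = (int(s) for s in size)

# The number of rows must equal the product of the five grid sizes.
assert NSX.shape == (n_te * n_tbb * n_tau * n_E * n_mu, 6)

# Energy is the fastest-running column and t_e the slowest, which is the
# ordering that the stride-based slicing in CustomPhotosphere_NumA5 relies on.
Energy = NSX[:n_E, 0]
t_e = NSX[::n_E * n_mu * n_tau * n_tbb, 4]
assert len(np.unique(Energy)) == n_E
assert len(t_e) == n_te and np.all(np.diff(t_e) > 0)

The stride-based slicing above mirrors what CustomPhotosphere_NumA5.hot_atmosphere does in TestRun_PolNum_split.py, so a failure here points at the flattening order in the conversion script rather than at the X-PSI loader.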