#!/usr/bin/env python
"""
Build per-pixel maps of the Galactic ridge X-ray emission (GRXE) residuals:
for every pixel of the reference RXTE model map, accumulate the ScW-level GRXE
values and store the sigma-clipped mean, its standard error (SEM), the
significance and the ScW count as FITS images.
"""

__author__ = "Roman Krivonos"
__copyright__ = "Space Research Institute (IKI)"

import numpy as np
import numpy.ma as ma
import pandas as pd
from astropy.wcs import WCS
from astropy import wcs
from astropy.io import fits
from astropy.table import Table, Column
import matplotlib.pyplot as plt
import math, sys, os
import pickle

from astropy.coordinates import SkyCoord                    # High-level coordinates
from astropy.coordinates import ICRS, Galactic, FK4, FK5    # Low-level frames
from astropy.coordinates import Angle, Latitude, Longitude  # Angles
import astropy.units as u

from sklearn.linear_model import LinearRegression
from sklearn.linear_model import HuberRegressor
from sklearn.linear_model import RANSACRegressor
from sklearn.linear_model import TheilSenRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import RepeatedKFold

from astropy.stats import sigma_clip
from astropy.stats import sigma_clipped_stats

from scipy.stats import norm
from scipy.stats import describe
from scipy.stats import sem

from numpy import absolute
from numpy import arange

from ridge.utils import *
from ridge.config import *

enkey = sys.argv[1]
#key = sys.argv[2]
key = "ALL"

fn = 'detcnts.{}.{}.resid.fits'.format(enkey, key)

print("Reading {}".format(proddir + fn))
dat = Table.read(proddir + fn, unit_parse_strict='silent')
df = dat.to_pandas()
#df = df.query('abs(LAT) < {} & abs(LON) < {}'.format(5, 5))

n_bins = 80
minmax = [-300, 800]
sigma = 3
maxiters = 10

# list of revolutions (orbits) to be excluded from the analysis
with open(ignored_rev_file, 'rb') as fp:
    ignored_rev = pickle.load(fp)
print(ignored_rev)
print("{} orbits ignored".format(len(ignored_rev)))
ign = ignored_rev.tolist()

# the reference RXTE model map defines the WCS grid and the map dimensions
hdulist = fits.open(datadir + modelrxte)
w = wcs.WCS(hdulist[0].header)
smap = hdulist[0].data
sx = int(hdulist[0].header['NAXIS1'])
sy = int(hdulist[0].header['NAXIS2'])

# fill AITOFF map pixel indices (DS9 convention, 1-based) for every ScW
ds9x = []
ds9y = []
for i, row in df.iterrows():
    lon = row['LON']
    lat = row['LAT']
    world = SkyCoord(lon, lat, frame=Galactic, unit="deg")
    ra = world.fk5.ra.deg
    dec = world.fk5.dec.deg
    pixcrd = w.wcs_world2pix([(lon, lat)], 1)
    x = int(pixcrd[0][0])
    y = int(pixcrd[0][1])
    ds9x.append(x)
    ds9y.append(y)
    #print(x, y, smap[y-1, x-1])

df['DS9X'] = ds9x
df['DS9Y'] = ds9y

#
# initiate 2d arrays
#
mean_map = np.zeros((sy, sx))
sign_map = np.zeros((sy, sx))
sem_map  = np.zeros((sy, sx))
cnt_map  = np.zeros((sy, sx), dtype=int)

# simulations
mean_sim_map  = np.zeros((sy, sx))
error_sim_map = np.zeros((sy, sx))
sign_sim_map  = np.zeros((sy, sx))

mean_sim = {}
for i in range(sx):
    for j in range(sy):
        dkey = "{:04d}{:04d}".format(j, i)
        mean_sim[dkey] = []

obsid_map = {}
grxe_map = {}
grxe_err_map = {}

# redefine simfrac because of the low number of ScWs per pixel
simfrac = 2
nsim = 100
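#
# Per-pixel processing: select the ScWs falling into each map pixel (excluding
# ignored orbits) and compute the sigma-clipped GRXE mean and its standard
# error with get_spec() from ridge.utils, which is assumed here to return the
# (mean, SEM) pair in mCrab for the given ScW selection. To probe the stability
# of the mean, nsim random subsamples of 1/simfrac of the ScWs are drawn per
# pixel; the spread of their means is later fitted with a Gaussian (norm.fit)
# to build the simulated mean/error maps.
#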
for i in range(sx):
    for j in range(sy):
        dkey = "{:04d}{:04d}".format(j, i)

        world = w.wcs_pix2world([(i + 1, j + 1)], 1)
        lon = world[0][0]
        lat = world[0][1]
        if np.isnan(lon) or np.isnan(lat):
            continue

        ds9i = i + 1
        ds9j = j + 1

        # "REV != @ign" in pandas.query() acts like "REV not in @ign" for a list
        df0 = df.query('DS9X == {} & DS9Y == {} & REV != @ign'.format(ds9i, ds9j))
        if df0.shape[0] <= nscw_min:
            continue

        print("*** *** SUM *** ***")
        print(np.sum(df0["GRXE"]))

        # check coordinates
        #print("***", i + 1, j + 1, lon, lat, smap[j][i])
        #for i0, row0 in df0.iterrows():
        #    print(row0['LON'], row0['LAT'], row0['GRXE'])

        sg_mean, sg_sem = get_spec(df0, sigma=sigma, grxe_err_cut=grxe_err_cut, enkey=enkey)

        # draw nsim random subsamples of nsel ScWs and keep their means
        nsel = int(df0.shape[0] / simfrac)
        print("nsel=", nsel, df0.shape[0], len(df0['GRXE']))
        for n in range(nsim):
            df1 = df0.sample(nsel)
            sg_mean1, sg_sem1 = get_spec(df1, grxe_err_cut=grxe_err_cut, enkey=enkey)
            mean_sim[dkey].append(sg_mean1)

        print('sg_sem', sg_sem)
        mean_map[j][i] = sg_mean
        sem_map[j][i] = sg_sem
        sign_map[j][i] = sg_mean / sg_sem
        cnt_map[j][i] = df0.shape[0]

        """
        obsid_map[dkey] = obsid
        grxe_map[dkey] = grxe
        grxe_err_map[dkey] = grxe_err
        """

#
# Filter by error map
#

# calculate the percentile across the x and y dimensions
perc = np.percentile(sem_map, sem_cut, axis=(0, 1), keepdims=False)
print("{} {}: {}% cut of SEM map: {:.2f} mCrab".format(key, enkey, sem_cut, perc))

# zero out pixels whose SEM exceeds the percentile cut
idx = np.where(sem_map > perc)
mean_map[idx] = 0.0
sem_map[idx] = 0.0
cnt_map[idx] = 0
sign_map[idx] = 0.0

if not os.path.exists(mapsdir):
    os.makedirs(mapsdir)

hdulist[0].data = mean_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".mean.fits"), overwrite=True)

hdulist[0].data = sem_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".error.fits"), overwrite=True)

hdulist[0].data = cnt_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".cnt.fits"), overwrite=True)

hdulist[0].data = sign_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".sign.fits"), overwrite=True)

# fit the distribution of subsample means in each pixel with a Gaussian
for i in range(sx):
    for j in range(sy):
        dkey = "{:04d}{:04d}".format(j, i)
        data = mean_sim[dkey]
        if len(data) == 0:
            # pixel was skipped in the main loop; keep map values at zero
            continue
        (mu, sg) = norm.fit(data)
        mean_sim_map[j][i] = mu
        error_sim_map[j][i] = sg

hdulist[0].data = mean_sim_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".sim.mean.fits"), overwrite=True)

hdulist[0].data = error_sim_map
hdulist.writeto(mapsdir + fn.replace(".fits", ".sim.error.fits"), overwrite=True)

sys.exit()

# NOTE: the code below is not reached because of sys.exit() above; it also
# relies on obsid_map/grxe_map/grxe_err_map, which are only filled by the
# commented-out block in the main loop.
print("Prepare data for fine map")

obsid_list = []
grxe_list = []
grxe_err_list = []

for i in range(sx):
    for j in range(sy):

        # use the count map as a mask: skip pixels zeroed by the SEM cut
        if not (cnt_map[j][i] > 0):
            continue

        world = w.wcs_pix2world([(i + 1, j + 1)], 1)
        lon = world[0][0]
        lat = world[0][1]
        if np.isnan(lon) or np.isnan(lat):
            continue

        ds9i = i + 1
        ds9j = j + 1

        df0 = df.query('DS9X == {} & DS9Y == {}'.format(ds9i, ds9j))
        if len(df0) <= nscw_min:
            continue

        dkey = "{:04d}{:04d}".format(j, i)
        for scw in obsid_map[dkey]:
            obsid_list.append(scw.decode("UTF-8"))
        for grxe in grxe_map[dkey]:
            grxe_list.append(grxe)
        for grxe in grxe_err_map[dkey]:
            grxe_err_list.append(grxe)

# only OBSID is written; grxe_list/grxe_err_list are collected but not included
coldefs = fits.ColDefs([
    fits.Column(name='OBSID', format='11A', array=obsid_list),
])

fout = fn.replace(".fits", ".grxe.fits")

hdu = fits.BinTableHDU.from_columns(coldefs, name='GRXE')
hdu.header['MISSION'] = ('INTEGRAL', '')
#hdu.header['TELESCOP'] = (outkey, '')
hdu.header['INSTITUT'] = ('IKI', 'Affiliation')
hdu.header['AUTHOR'] = ('Roman Krivonos', 'Responsible person')
hdu.header['EMAIL'] = ('krivonos@cosmos.ru', 'E-mail')
#hdu.add_checksum()
print(hdu.columns)
hdu.writeto(proddir + fout, overwrite=True)

with fits.open(proddir + fout, mode='update') as hdus:
    hdus[1].add_checksum()