# forked from xmm/arches
import os
|
|
import sys
|
|
import csv
|
|
import numpy as np
|
|
from astropy.io import fits
|
|
from astropy import wcs
|
|
from astropy.wcs import WCS
|
|
from astropy.io.fits import update
|
|
from astropy.io.fits import getdata
|
|
import glob
|
|
import json
|
|
from fitsio import FITS
|
|
from pathlib import Path
|
|
import pandas
|
|
import pickle
|
|
from astropy.table import QTable, Table, Column
|
|
from astropy import units as u
|
|
|
|
from astropy.coordinates import SkyCoord # High-level coordinates
|
|
from astropy.coordinates import ICRS, Galactic, FK4, FK5 # Low-level frames
|
|
from astropy.coordinates import Angle, Latitude, Longitude # Angles
|
|
|
|
from astropy.time import Time, TimeDelta, TimezoneInfo, TimeFromEpoch
|
|
|
|
import statistics
|
|
import shutil
|
|
|
|
from uds.astrometry import *
|
|
|
|
# Mission reference epoch as Modified Julian Date; on-board seconds are
# counted from this instant (MJD 51543.875 = 1999-12-31 21:00 UTC,
# i.e. 2000-01-01 00:00 Moscow time — presumably the SRG clock zero; confirm).
MJDREF = 51543.875
# Time zones used when converting mission time to human-readable datetimes.
TZ_UTC = TimezoneInfo(utc_offset=0*u.hour)
TZ_MSK = TimezoneInfo(utc_offset=3*u.hour)
# strftime/strptime pattern for time strings used elsewhere in the package.
TIMESTR_FORMAT = '%Y.%m.%d %H:%M:%S'
|
|
|
|
def mission2date_utc(timesec):
    """Convert on-board mission seconds (since MJDREF) to a UTC datetime.

    :param timesec: seconds elapsed since the mission reference epoch
    :return: timezone-aware ``datetime`` in UTC
    """
    reference = Time(MJDREF, format='mjd')
    elapsed = TimeDelta(timesec, format='sec')
    return (reference + elapsed).to_datetime(timezone=TZ_UTC)
|
|
|
|
def mission2date_msk(timesec):
    """Convert on-board mission seconds (since MJDREF) to Moscow time.

    :param timesec: seconds elapsed since the mission reference epoch
    :return: timezone-aware ``datetime`` in UTC+3 (Moscow)

    Bug fix: the previous version added an explicit ``3*u.hour`` to the
    absolute Time *and* converted with ``timezone=TZ_MSK``; since
    ``Time.to_datetime(timezone=...)`` already applies the UTC+3 offset,
    the result was shifted by six hours instead of three (and disagreed
    with ``mission2date_utc`` for the same input).
    """
    mjdref = Time(MJDREF, format='mjd')
    delta = TimeDelta(timesec, format='sec')
    dtime = mjdref + delta
    return dtime.to_datetime(timezone=TZ_MSK)
|
|
|
|
def create_folder(folder):
|
|
if not (os.path.exists(folder)):
|
|
os.makedirs(folder)
|
|
|
|
def remove_file(filename):
|
|
if(os.path.isfile(filename)==True):
|
|
os.remove(filename)
|
|
|
|
def convert_dr12_to_erosita_flux(rec, field_prefix='SC_'):
    """Energy conversion from 4XMM 0.2-2.0 keV to eROSITA 0.3-2.3 keV.

    4XMM band definitions:
      1 = 0.2-0.5 keV, 2 = 0.5-1.0 keV, 3 = 1.0-2.0 keV,
      4 = 2.0-4.5 keV, 5 = 4.5-12.0 keV
    Bands 1-3 are summed to form the 0.2-2.0 keV flux, then rescaled.

    The conversion factor comes from an XSPEC wabs*powerlaw model
    (nH = 2.0e-2 x 10^22, PhoIndex = 2.0, norm = 1.0e-2):

      XSPEC12>flux 0.2 2.0
      Model Flux 0.029289 photons (2.9208e-11 ergs/cm^2/s) range (0.20000 - 2.0000 keV)
      XSPEC12>flux 0.3 2.3
      Model Flux 0.023955 photons (2.9017e-11 ergs/cm^2/s) range (0.30000 - 2.3000 keV)

    :param rec: record indexable by '<prefix>EP_<n>_FLUX[_ERR]' fields
    :param field_prefix: column-name prefix, 'SC_' for the DR12 slim catalog
    :return: tuple (flux, error) in the eROSITA 0.3-2.3 keV band
    """
    coeff = 2.9017e-11/2.9208e-11

    band_fields = [field_prefix + 'EP_{}_FLUX'.format(band) for band in (1, 2, 3)]
    flux = coeff * sum(rec[name] for name in band_fields)
    # errors of the three bands are added in quadrature
    error = coeff * np.sqrt(sum(rec[name + '_ERR']**2 for name in band_fields))
    return (flux, error)
|
|
|
|
|
|
def do_evtool_esass(evfile=None, events=None, outfile=None, evlist=None,
                    gti=None, region=None, emin=None, emax=None, rmlock=False,
                    do_center=False, ra_cen=None, de_cen=None):
    """Run the eSASS ``evtool`` task on one or more event files.

    Exactly one of the event-file arguments should be given; later ones
    take precedence in the order events -> evlist -> evfile:
      evfile  -- single event file path
      events  -- list of event file paths (joined into a quoted list)
      evlist  -- text file listing event files (passed as @list)
    gti/region/emin/emax are forwarded to evtool when set.
    rmlock=True removes leftover *.lock files next to *outfile*.
    do_center=True (with evfile) first re-projects X/Y via ``radec2xy``
    to ra_cen/de_cen.
    """
    eventfiles=None
    if(events):
        eventfiles="eventfiles=\'{}\'".format((" ").join(events))
    if(evlist):
        eventfiles="eventfiles=@{}".format(evlist)
    if(evfile):
        eventfiles="eventfiles={}".format(evfile)

    if not (eventfiles):
        # NOTE(review): prints the error but still falls through and runs
        # evtool with eventfiles=None in the command list — confirm intended.
        print("ERROR: Event files not provided")

    if(do_center==True and evfile):
        """ re-center event file """
        if not (ra_cen and de_cen):
            print("Please provide center coordinates")
            sys.exit()
        test_exe('radec2xy')
        cmd=["radec2xy",
             "{}".format(evfile),
             "{}".format(ra_cen),
             "{}".format(de_cen)]
        print((" ").join(cmd))
        os.system((" ").join(cmd))

    # Optional evtool parameters collapse to '' when unset so the joined
    # command line simply omits them.
    emin="emin={}".format(emin) if(emin) else ''
    emax="emax={}".format(emax) if(emax) else ''
    gti="gti=\'{}\'".format(gti) if(gti) else ''
    region="region=\'{}\'".format(region) if(region) else ''

    cmd=["evtool",
         eventfiles,
         gti,region,emin,emax,
         "outfile={}".format(outfile),
         "image=yes",
         # flag=0x2000 selects events marked by the TM6 bad-pixel correction
         # convention used elsewhere in this module (see do_badpix_tm6).
         "flag=0x2000",
         "pattern=15"
         ]

    print((" ").join(cmd))
    test_exe('evtool')
    os.system((" ").join(cmd))

    if(rmlock==True):
        # Remove stale eSASS lock files matching "<outfile-stem>*.lock".
        lockfiles=outfile.replace(".fits", "*.lock")
        for filename in glob.glob(lockfiles):
            print("Removing {}".format(filename))
            if(os.path.isfile(filename)==True):
                os.remove(filename)
        pass
|
|
|
|
|
|
def set_bit(value, bit):
    """Return *value* with bit number *bit* (0-based) set to 1."""
    mask = 1 << bit
    return value | mask
|
|
|
|
def clear_bit(value, bit):
    """Return *value* with bit number *bit* (0-based) forced to 0."""
    mask = ~(1 << bit)
    return value & mask
|
|
|
|
def do_badpix_tm6(filename):
    """
    Special correction for the TM6 camera, needed for two CalPV-UDS
    observations which show high background at RAWX>250.

    Flags every event with RAWX >= 250 as bad (FLAG bit 13, i.e. 0x2000)
    and appends the corresponding pixels to the BADPIX6 extension.

    Creates a new file with the "_badpix.fits" suffix; the input file is
    left untouched.
    """

    nraw=384          # detector width in raw pixels
    skip_after=250    # first RAWX column to be flagged bad
    nskip=(nraw - skip_after)
    nrows0=nraw*nskip # number of bad-pixel rows to append (columns x height)

    fout=filename.replace(".fits", "_badpix.fits")
    shutil.copyfile(filename, fout)

    # Read timing/size info with astropy ...
    hdul = fits.open(fout)
    #hdul.info()
    tstart=hdul[1].header['TSTART']
    tstop=hdul[1].header['TSTOP']
    nrows=hdul[1].header['NAXIS2']

    # ... but do the in-place table rewrite with fitsio, which can write
    # back into an existing extension.
    f = FITS(fout,'rw')
    data = f['EVENTS'].read()

    #print(data.dtype)

    # Mark every event in the noisy column range with FLAG bit 13 (0x2000).
    for row in range(nrows):
        if (data['RAWX'][row] >= skip_after):
            data['FLAG'][row]=set_bit(data['FLAG'][row],13)
    #hdr = fits[0].read_header()
    f[1].write(data)
    f[1].write_comment("Changed by Roman Krivonos")
    f[1].write_history("All RAWx>=250 marked as BAD 0x2000")

    # Build the block of new BADPIX6 rows covering RAWX in [250, 384).
    data_badpix = f['BADPIX6'].read()
    data4 = np.zeros(nrows0,dtype=data_badpix.dtype)

    n=0
    for i in range(skip_after,nraw):
        for j in range(nraw):
            data4['RAWX'][n]=i
            data4['RAWY'][n]=j
            data4['TIMEMIN'][n]=tstart
            data4['TIMEMAX'][n]=tstop
            data4['BADFLAG'][n]=3
            data4['TYPE'][n]=3
            data4['PHAMAX'][n]=12000
            data4['PHAMIN'][n]=0
            n=n+1

    # NOTE(review): extension index 4 is assumed to be BADPIX6 — confirm
    # this holds for both CalPV-UDS files.
    f[4].append(data4)
    f.close()
|
|
|
|
def init_events(key=None, eband_selected=[0], eband_index=None,
                ra_cen=None, de_cen=None,do_init=True,vign=True,
                emin_kev=None, emax_kev=None, infile_dir=None, outfile_dir=None,
                do_obsmode=False,do_center=False,do_evtool=False,do_expmap=False,attcorr=False):
    """Prepare one observation ("key") for processing in a single energy band.

    Optionally (all guarded by do_init):
      do_obsmode -- fix the OBS_MODE keyword in the raw event file
                    (SURVEY for 'scan' files, POINTING otherwise)
      do_center  -- re-project X/Y to ra_cen/de_cen via ``radec2xy``
      do_evtool  -- extract the band-selected event list + image with evtool
      do_expmap  -- build the matching exposure map with expmap
    Lock files in infile_dir make do_obsmode/do_center run-once.

    :return: (outfile_evtool, outfile_expmap) path strings
    NOTE(review): eband_selected is a mutable default argument ([0]) and is
    never used in this function body — confirm it can be dropped.
    """
    expmaps=[]
    expmap_all=[]
    infile_expmap=[]
    emin=[]
    emax=[]
    emin_ev_str=[]
    emax_ev_str=[]
    outfile_evtool=[]
    outfile_expmap=[]

    outfile_post = '.fits'
    # Attitude-corrected event lists carry a distinct suffix.
    outfile_post_events = '.fits' if (attcorr==False) else '.attcorr.fits'

    if not (infile_dir):
        print("infile_dir=?")
        return

    if not (outfile_dir):
        print("outfile_dir=?")
        return

    print("init events -- reading key {}".format(key))

    infile="{}/{}.fits".format(infile_dir,key)

    if(do_obsmode==True and do_init==True):
        """ correct OBS_MODE in files """

        lockfile="{}/{}.obsmode.lock".format(infile_dir,key)
        if not os.path.isfile(lockfile):
            with fits.open(infile) as hdu:
                for h in hdu:
                    extname = h.header['EXTNAME'] if "EXTNAME" in h.header else "None"
                    changeto = 'SURVEY' if ('scan' in infile) else 'POINTING'
                    if not (h.header['OBS_MODE'] == changeto):
                        print("*** {} {} --> {}".format(extname,h.header['OBS_MODE'],changeto))
                        h.header['OBS_MODE'] = changeto
                hdu.writeto(infile,overwrite=True)
            Path(lockfile).touch()
        else:
            print("Lock file {} is found, skipping OBS_MODE correction.".format(lockfile))
            pass

    if(do_center==True and do_init==True and attcorr==False):
        """ re-center original events files """
        if not (ra_cen and de_cen):
            print("Please provide center coordinates")
            sys.exit()
        cmd=["radec2xy",
             "{}".format(infile),
             "{}".format(ra_cen),
             "{}".format(de_cen)]

        lockfile="{}/{}.radec2xy.lock".format(infile_dir,key)
        if not os.path.isfile(lockfile):
            print((" ").join(cmd))
            test_exe('radec2xy')
            os.system((" ").join(cmd))
            Path(lockfile).touch()
        else:
            print("Lock file {} is found, skipping radec2xy.".format(lockfile))
            pass

    outfile_evtool="{}/{}_EventList_en{}{}".format(outfile_dir, key, eband_index, outfile_post_events)

    cmd=["evtool",
         "eventfiles=%s" %(infile),
         "outfile=%s" %(outfile_evtool),
         "emin=%f" %(emin_kev),
         "emax=%f" %(emax_kev),
         #"region=%s" %(region),
         "image=yes",
         "flag=0x2000",
         "pattern=15"
         ]
    # run the command
    if(do_evtool==True and do_init==True):
        #log = subprocess.check_call(cmd)
        print((" ").join(cmd))
        test_exe('evtool')
        os.system((" ").join(cmd))
        """ correct OBS_MODE """
        # evtool output does not keep the corrected OBS_MODE, so re-apply it.
        with fits.open(outfile_evtool) as hdu:
            for h in hdu:
                extname = h.header['EXTNAME'] if "EXTNAME" in h.header else "None"
                changeto = 'SURVEY' if ('scan' in infile) else 'POINTING'
                if not (h.header['OBS_MODE'] == changeto):
                    print("*** {} {} --> {}".format(extname,h.header['OBS_MODE'],changeto))
                    h.header['OBS_MODE'] = changeto
            hdu.writeto(outfile_evtool,overwrite=True)

    if(vign==True):
        withvignetting='yes'
        outfile_expmap="{}_ExposureMap_en{}.vign{}".format(os.path.join(outfile_dir,key), eband_index, outfile_post)
    else:
        withvignetting='no'
        outfile_expmap="{}_ExposureMap_en{}.novign{}".format(os.path.join(outfile_dir,key), eband_index, outfile_post)

    if(do_expmap==True and do_init==True):
        cmd=["expmap",
             "inputdatasets=%s" %(outfile_evtool),
             "emin=%s" %(emin_kev),
             "emax=%s" %(emax_kev),
             "templateimage=%s" %(outfile_evtool),
             "singlemaps=%s" %(outfile_expmap),
             "withvignetting={}".format(withvignetting),
             "withsinglemaps=yes","withfilebadpix=yes",
             "withmergedmaps=no",
             ]
        print((" ").join(cmd))
        test_exe('expmap')
        os.system((" ").join(cmd))

    return outfile_evtool,outfile_expmap
|
|
|
|
def do_expmap_esass(expmaps=None, outfile=None, emin_kev=None, emax_kev=None, refimage=None):
    """Run the eSASS ``expmap`` task to build one merged, vignetted exposure map.

    :param expmaps: list of input event/exposure datasets
    :param outfile: path of the merged exposure map
    :param emin_kev: lower energy bound [keV]
    :param emax_kev: upper energy bound [keV]
    :param refimage: template image defining the output WCS/grid
    """
    inputs = (" ").join(expmaps)
    cmd = [
        "expmap",
        "inputdatasets=\'{}\'".format(inputs),
        "emin=%s" % (emin_kev),
        "emax=%s" % (emax_kev),
        "templateimage=%s" % (refimage),
        "singlemaps=%s" % (outfile),
        "withvignetting=yes",
        "withsinglemaps=no",
        "withfilebadpix=yes",
        "withmergedmaps=yes",
    ]
    command = (" ").join(cmd)
    print(command)
    test_exe('expmap')
    os.system(command)
|
|
|
|
def do_fimgmerge_ftools(maps=None, outfile=None):
    """Merge FITS images with FTOOLS ``fimgmerge``.

    Takes the first map as the reference and adds the remaining maps onto
    it at offset (0,0); the merge list is written to ``maps.txt`` in the
    current directory.

    :param maps: list of image paths; maps[0] is the reference image
    :param outfile: path of the merged output image

    Fixes: the list file is now written inside a context manager, so it is
    closed even if a write fails; the manual index loop over
    ``range(1, len(maps))`` is replaced by iterating ``maps[1:]``.
    """
    tmp = "maps.txt"
    with open(tmp, "w") as f:
        for extra_map in maps[1:]:
            f.write("{},0,0\n".format(extra_map))
    cmd = ["fimgmerge",
           "infile={}".format(maps[0]),
           "list=@{}".format(tmp),
           "outfile={}".format(outfile),
           "clobber=yes",
           ]
    print((" ").join(cmd))
    test_exe('fimgmerge')
    os.system((" ").join(cmd))
|
|
|
|
def runme(cmd, local_run=True, cwd='/srg/work/krivonos/erosita/UDS-paper/uds/scripts'):
    """Echo *cmd* (a list of argv tokens) and run it.

    With local_run=True the command runs through ``os.system`` on this
    host; otherwise it is executed over ssh on the eSASS machine
    (10.5.2.13) after cd'ing into *cwd*.
    """
    command = (" ").join(cmd)

    print()
    print(command)
    print()

    if local_run == True:
        os.system(command)
    else:
        print("***************************************************")
        print("*** Run the above command on the other computer ***")
        print("***************************************************")
        #wait = input('Waiting for input... ')

        # see https://confluence.atlassian.com/bitbucketserverkb/ssh-rsa-key-rejected-with-message-no-mutual-signature-algorithm-1026057701.html
        os.system("ssh -o PubkeyAcceptedKeyTypes=+ssh-rsa 10.5.2.13 \"esass ; cd {} ; {}\" ".format(cwd, command))
|
|
|
|
|
|
def test_exe(program):
|
|
""" Tests if executable exists in PATH """
|
|
import os
|
|
def is_exe(fpath):
|
|
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
|
|
|
|
fpath, fname = os.path.split(program)
|
|
if fpath:
|
|
if is_exe(program):
|
|
return program
|
|
else:
|
|
for path in os.environ["PATH"].split(os.pathsep):
|
|
exe_file = os.path.join(path, program)
|
|
if is_exe(exe_file):
|
|
return exe_file
|
|
|
|
print("\n*** Command {} not found ***\n".format(program))
|
|
sys.exit()
|
|
|
|
def save_ds9reg(filename, scale=60):
    """Write a ds9 fk5 region file next to a source catalog.

    Each catalog row becomes a circle at (ra, dec) with radius
    radec_err/scale. Silently does nothing when *filename* is missing.
    """
    if not os.path.isfile(filename):
        return
    region_file = filename.replace(".fits", ".reg")
    catalog = fits.open(filename)[1].data
    with open(region_file, 'w') as out:
        for rec in catalog:
            out.write("fk5;circle({}, {}, {})\n".format(rec['ra'], rec['dec'], rec['radec_err']/scale))
|
|
|
|
def save_ermldet_ds9reg(filename, scale=60, id_instr=1, id_band=1, ext_like=0.0, label='det_like', dl=10.0):
    """Write a ds9 region file for ermldet rows of one instrument/band.

    Rows with det_like below *dl*, a different instrument/band, or a zero
    *label* value are skipped; each surviving row becomes a circle of
    radius radec_err/scale annotated with its *label* value.
    (ext_like is currently unused; the extent filter is commented out
    in the original.)
    """
    if not os.path.isfile(filename):
        return
    #fout=filename.replace(".fits", ".instr{}.{}.reg".format(id_instr,label))
    region_file = filename.replace(".fits", ".{}.reg".format(label))
    rows = fits.open(filename)[1].data
    with open(region_file, 'w') as out:
        for rec in rows:
            # keep only the requested instrument/band combination
            if rec['id_inst'] != id_instr or rec['id_band'] != id_band:
                continue
            if rec['det_like'] < dl:
                continue
            #if (rec['ext_like'] > ext_like):
            #    continue
            if abs(rec[label]) > 0.0:
                out.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['ra'], rec['dec'], rec['radec_err']/scale, rec[label]))
|
|
|
|
def save_catprep_ds9reg(filename, scale=60, id_instr=1, id_band=1, ext_like=0.0, label='det_like_0'):
    """Write a ds9 region file for a catprep catalog.

    Extended sources (ext_like above the threshold) and rows whose *label*
    value is zero are skipped; each remaining row becomes a circle of
    radius radec_err/scale annotated with the *label* value (1 decimal).
    (id_instr/id_band are accepted for signature symmetry but unused here.)
    """
    if not os.path.isfile(filename):
        return
    #fout=filename.replace(".fits", ".instr{}.{}.reg".format(id_instr,label))
    region_file = filename.replace(".fits", ".{}.reg".format(label))
    rows = fits.open(filename)[1].data
    with open(region_file, 'w') as out:
        for rec in rows:
            if rec['ext_like'] > ext_like:
                continue
            if abs(rec[label]) > 0.0:
                out.write("fk5;circle({}, {}, {}) # text={{{:.1f}}}\n".format(rec['ra'], rec['dec'], rec['radec_err']/scale, rec[label]))
|
|
|
|
|
|
def crossmatch_shu2019(filename,refimage=None,crval=None,devmax=30,dlmin=6.0,dlmax=10000,ext_like=0.0,outkey='shu2019', catalog=None, errlim=10.0):
    """Cross-match an eROSITA source list with the Gaia-unWISE (Shu+2019) catalog.

    Writes, next to *filename*:
      *.<outkey>.cross.fits -- all pairs within devmax arcsec
      *.<outkey>.src.fits   -- selected eROSITA positions (+ marker image)
      *.<outkey>.ref.fits   -- matching Gaia-unWISE positions (+ marker image)
      *.<outkey>.dat        -- mean/median offset statistics
    Sources with more than one counterpart are excluded from the offset
    statistics via the ``skip`` table.

    :param filename: eROSITA catalog FITS file
    :param refimage: image whose WCS/grid defines the marker maps
    :param crval: (ra, dec) used to override the reference WCS CRVAL
    :param devmax: match radius in arcsec
    :param dlmin/dlmax: DET_LIKE selection window for the astrometry sample
    :param ext_like: maximum EXT_LIKE for point-source selection
    :param catalog: Gaia-unWISE reference catalog FITS file
    :param errlim: maximum RADEC_ERR (arcsec) for the astrometry sample
    """
    if(os.path.isfile(filename)==False):
        # NOTE(review): only warns — execution continues and fits.open below
        # will raise; confirm a `return` is not wanted here.
        print("File not found {}".format(filename))
    print("Start cross-match with Gaia-unWISE")

    if not crval:
        print("ERROR: Please provide center coordinates array: crval=")
        print("NOTES: See uds.config.wcslist")
        return

    if not refimage:
        print("ERROR: Please provide reference image: refimage=")
        return

    # Reference WCS, re-centered on the requested CRVAL.
    hdulist = fits.open(refimage)
    w = WCS(hdulist[0].header)
    w.wcs.crval=crval
    naxis1=hdulist[0].header['NAXIS1']
    naxis2=hdulist[0].header['NAXIS2']

    if not catalog:
        print("ERROR: Please provide reference catalog: catalog=")
        return

    # Load the Gaia-unWISE reference catalog; zero/negative proper-motion
    # errors are replaced by 1.0 (arcsec units assumed — confirm).
    hdul = fits.open(catalog)
    tbdata_ref = hdul[1].data
    cat=[]
    for rec in tbdata_ref:
        cat.append({"ra":rec['ra'],"dec":rec['dec'],
                    "pmra_err":rec['pmra_err'] if(rec['pmra_err']>0) else 1.0,
                    "pmdec_err":rec['pmdec_err'] if(rec['pmdec_err']>0) else 1.0,
                    "gaia":rec["gaia_sourceid"],"unwise":rec["unwise_objid"]})

    print("Reading {}".format(filename))
    hdul = fits.open(filename)
    tbdata_src = hdul[1].data

    # Select point-like eROSITA sources with a valid positional error.
    srcs=[]
    for rec in tbdata_src:
        if (rec['ext_like'] > ext_like):
            continue

        if not (rec['radec_err'] > 0.0):
            continue

        srcs.append({"id":rec['id_src'],
                     "ra":rec['ra'],
                     "det_like":rec['det_like_0'],
                     "ext_like":rec['ext_like'],
                     "dec":rec['dec'],
                     "radec_err":rec['radec_err']})

    fout=filename.replace(".fits", ".{}.cross.fits".format(outkey))
    print("Output file: {}".format(fout))
    if(os.path.isfile(fout)==True):
        os.remove(fout)

    t = Table(names=('ID','CNT',
                     'DET_LIKE', 'EXT_LIKE',
                     'SRC_RA', 'SRC_DEC','RADEC_ERR',
                     'REF_RA', 'REF_DEC','PMRA_ERR', 'PMDEC_ERR',
                     'SEP','GAIA','UNWISE'),
              dtype=('i4','i4','f4', 'f4','f8','f8','f8','f8', 'f8','f8', 'f8','f4','i8','S16'))

    d = {}      # source id -> number of counterparts found
    skip = {}   # source ids with >1 counterpart (excluded from statistics)

    # do correlation
    for src in srcs:
        src_crd = SkyCoord(src['ra'], src['dec'], frame=FK5(), unit="deg")
        sep=0

        for scat in cat:
            scat_crd = SkyCoord(scat['ra'], scat['dec'], frame=FK5(), unit="deg")
            sep = src_crd.separation(scat_crd).arcsec
            if(sep < devmax):
                sid=src['id']
                if(sid in d.keys()):
                    d[sid]=d[sid]+1
                    skip[sid]=1
                else:
                    d[sid]=1
                t.add_row((src['id'], d[sid], src['det_like'], src['ext_like'],
                           src['ra'], src['dec'], src['radec_err'],
                           scat['ra'], scat['dec'], scat['pmra_err'], scat['pmdec_err'],
                           sep, scat['gaia'], scat['unwise']))

    t.write(fout, format='fits')

    src_tab = Table(names=('ID','RA', 'DEC','RA_ERR','DEC_ERR','RADEC_ERR'), dtype=('i4','f8','f8','f8','f8','f8'))
    src_fout=fout.replace(".cross.fits", ".src.fits")

    ref_fout=fout.replace(".cross.fits", ".ref.fits")
    ref_tab = Table(names=('GAIA','RA', 'DEC','RA_ERR','DEC_ERR'), dtype=('i8','f8','f8','f8','f8'))

    # Second pass over the freshly written cross-match table: build the
    # astrometric-calibration sample and the marker images.
    hdul = fits.open(fout)
    tbdata = hdul[1].data

    srcs=[]
    src_map=np.zeros((naxis1, naxis2))
    ref_map=np.zeros((naxis1, naxis2))
    delta_ra = []
    delta_dec = []
    delta_sep = []
    for rec in tbdata:
        if not (rec['det_like'] > dlmin and rec['det_like'] < dlmax and rec['radec_err'] < errlim):
            continue
        if (rec['id'] in skip.keys()):
            print("Skip ID={} DET_LIKE={:.2f}".format(rec['id'],rec['det_like']))
        else:
            print("Take ID={} DET_LIKE={:.2f}".format(rec['id'],rec['det_like']))
            src_crd = SkyCoord(rec['src_ra'], rec['src_dec'], frame=FK5(), unit="deg")
            ref_crd = SkyCoord(rec['ref_ra'], rec['ref_dec'], frame=FK5(), unit="deg")
            src_x, src_y = w.world_to_pixel(src_crd)

            src_map[int(np.round(src_y)), int(np.round(src_x))] = 1

            #print(rec['src_ra'], rec['src_dec'],"-->",src_x, src_y)

            # check
            #sky = w.pixel_to_world(src_x, src_y)
            #print(sky)

            ref_x, ref_y = w.world_to_pixel(ref_crd)
            ref_map[int(np.round(ref_y)), int(np.round(ref_x))]=1

            # radec_err is a radial error; split evenly into per-axis
            # errors and convert arcsec -> deg.
            src_tab.add_row((rec['id'],rec['src_ra'],rec['src_dec'],
                             np.sqrt(0.5*rec['radec_err']**2)/3600,
                             np.sqrt(0.5*rec['radec_err']**2)/3600,
                             rec['radec_err']))
            ref_tab.add_row((rec['gaia'],rec['ref_ra'],rec['ref_dec'],
                             rec['pmra_err']/3600,
                             rec['pmdec_err']/3600))
            delta_ra.append(rec['src_ra']-rec['ref_ra'])
            delta_dec.append(rec['src_dec']-rec['ref_dec'])
            sep=src_crd.separation(ref_crd).arcsec
            delta_sep.append(sep)
            print("delta [arcsec] RA, Dec, sep {:.4f} {:.4f}".format((rec['src_ra']-rec['ref_ra'])*3600,
                                                                     (rec['src_dec']-rec['ref_dec'])*3600,sep))

    # Offset statistics, tagged with the observation date range.
    tstart, tstop = read_tstart_tstop(infile=refimage)
    date_obs = mission2date_utc(tstart)
    date_end = mission2date_utc(tstop)
    stat_fout=fout.replace(".cross.fits", ".dat")
    with open(stat_fout, 'w') as writer:
        writer.write("[DATE-OBS {}, DATE-END {}] nref, mean [arcsec] dRA, dDec, mean/median sep (mean/median): {} & {:.4f} & {:.4f} & {:.4f}/{:.4f}\n".format(
            date_obs,date_end,len(delta_ra),
            statistics.mean(delta_ra)*3600,
            statistics.mean(delta_dec)*3600,
            statistics.mean(delta_sep),
            statistics.median(delta_sep)))

    # Write the source table, then attach the WCS header and marker image
    # to its primary HDU (verify('fix') first to clean up the new file).
    src_tab.write(src_fout, format='fits', overwrite=True)
    with fits.open(src_fout) as f:
        f[0].verify('fix')
        f[1].verify('fix')
    with fits.open(src_fout, 'update') as f:
        f[0].header=f[0].header+w.to_header()
        f[0].data=src_map
        #f[1].header=f[1].header+w.to_header()

    ref_tab.write(ref_fout, format='fits', overwrite=True)
    with fits.open(ref_fout) as f:
        f[0].verify('fix')
        f[1].verify('fix')
    with fits.open(ref_fout, 'update') as f:
        f[0].header=f[0].header+w.to_header()
        f[0].data=ref_map
        #f[1].header=f[1].header+w.to_header()
|
|
|
|
def crossmatch_dr12(filename,devmax=30,ext_like=0.0,outkey='dr12', catalog=None):
    """Cross-match an eROSITA catalog with 4XMM-DR12.

    Writes, next to *filename*:
      *.<outkey>.cross.fits  -- matched pairs with fluxes from both catalogs
      *.<outkey>.missed.reg  -- ds9 regions for eROSITA sources without a
                                DR12 counterpart
      *.<outkey>.missed.tex  -- LaTeX table rows for the same sources
    DR12 fluxes are converted to the eROSITA 0.3-2.3 keV band via
    ``convert_dr12_to_erosita_flux``.

    :param filename: eROSITA catprep-style catalog FITS file
    :param devmax: match radius in arcsec (extended sources use max(ext, devmax))
    :param ext_like: maximum EXT_LIKE for point-source selection
    :param catalog: 4XMM-DR12 catalog FITS file
    """
    flux_scale=1e-15   # flux unit used in the LaTeX table output
    if(os.path.isfile(filename)==False):
        # NOTE(review): warns but continues; fits.open below will raise.
        print("File not found {}".format(filename))

    print("Start cross-match with 4XMM-DR12 {}".format(catalog))

    if not catalog:
        print("ERROR: Please provide reference catalog: catalog=")
        return

    # Read DR12 catalog
    hdul = fits.open(catalog)
    tbdata_ref = hdul[1].data
    cat=[]
    for rec in tbdata_ref:
        flux, error = convert_dr12_to_erosita_flux(rec)
        cat.append({"ra":rec['sc_ra'],"dec":rec['sc_dec'],"4xmm":rec["srcid"],
                    "flux":flux, "flux_error":error,
                    "ep1_flux":rec['SC_EP_1_FLUX'],"ep1_flux_err":rec['SC_EP_1_FLUX_ERR'],
                    "ep2_flux":rec['SC_EP_2_FLUX'],"ep2_flux_err":rec['SC_EP_2_FLUX_ERR'],
                    "ep3_flux":rec['SC_EP_3_FLUX'],"ep3_flux_err":rec['SC_EP_3_FLUX_ERR'],
                    "iauname":rec["IAUNAME"]})

    # read input eROSITA catalog
    hdul = fits.open(filename)
    tbdata_src = hdul[1].data

    # Keep only point-like sources (EXT_LIKE <= threshold).
    srcs=[]
    for rec in tbdata_src:
        if (rec['ext_like'] > ext_like):
            continue
        srcs.append({"name":rec['name'],
                     "id":rec['id_src'],
                     "ra":rec['ra'],
                     "det_like":rec['det_like'],
                     "flux":rec['ml_flux'],
                     "flux_error":rec['ml_flux_err'],
                     "ext_like":rec['ext_like'],
                     "ext":rec['ext'],
                     "dec":rec['dec'],
                     "ape_cts":rec['APE_CTS_0'],
                     "ape_exp":rec['APE_EXP_0'],
                     "ape_bkg":rec['APE_BKG_0'],
                     "ml_cts":rec['ML_CTS'],
                     "radec_err":rec['radec_err']})

    fout=filename.replace(".fits", ".{}.cross.fits".format(outkey))
    print("Output file: {}".format(fout))

    if(os.path.isfile(fout)==True):
        os.remove(fout)

    t = Table(names=('ID','CNT',
                     'DET_LIKE', 'EXT_LIKE',
                     'SRC_RA', 'SRC_DEC','SRC_FLUX','SRC_FLUX_ERROR',
                     'DR12_RA', 'DR12_DEC',
                     'SEP','DR12_SRCID','DR12_NAME','DR12_FLUX','DR12_FLUX_ERROR',
                     'SC_EP_1_FLUX','SC_EP_1_FLUX_ERR',
                     'SC_EP_2_FLUX','SC_EP_2_FLUX_ERR',
                     'SC_EP_3_FLUX','SC_EP_3_FLUX_ERR',
                     'APE_CTS','APE_EXP','APE_BKG','ML_CTS'),
              dtype=('i4','i4','f4', 'f4','f8', 'f8','f8', 'f8','f8', 'f8','f4','i8','S16','f8','f8','f8','f8','f8','f8','f8','f8','f8','f8','f8','f8'))

    d = {}      # source id -> number of DR12 counterparts
    skip = {}   # source ids with more than one counterpart

    freg = open(filename.replace(".fits",".{}.missed.reg".format(outkey)), "w")
    ftex = open(filename.replace(".fits",".{}.missed.tex".format(outkey)), "w")

    # do correlation
    for src in srcs:
        src_crd = SkyCoord(src['ra'], src['dec'], frame=FK5(), unit="deg")
        sep=0
        found=False
        for scat in cat:
            scat_crd = SkyCoord(scat['ra'], scat['dec'], frame=FK5(), unit="deg")
            sep = src_crd.separation(scat_crd).arcsec

            # For extended sources widen the match radius to the source extent.
            devmax0=devmax
            if(src['ext']>0.0 and src['ext']>devmax):
                devmax0=src['ext']
                print("Use extended radius {}".format(devmax0))
            if(sep < devmax0):
                sid=src['id']
                found=True
                if(sid in d.keys()):
                    d[sid]=d[sid]+1
                    skip[sid]=1
                else:
                    d[sid]=1
                t.add_row((src['id'], d[sid], src['det_like'], src['ext_like'],
                           src['ra'], src['dec'],
                           src['flux'], src['flux_error'],
                           scat['ra'], scat['dec'],
                           sep, scat['4xmm'], scat['iauname']
                           ,scat['flux'],scat['flux_error'],
                           scat['ep1_flux'],scat['ep1_flux_err'],
                           scat['ep2_flux'],scat['ep2_flux_err'],
                           scat['ep3_flux'],scat['ep3_flux_err'],
                           src['ape_cts'], src['ape_exp'],src['ape_bkg'],
                           src['ml_cts']))
        if(found == False):
            # Record eROSITA sources with no DR12 counterpart.
            print("Not found in DR12: {}".format(src['name']))
            freg.write("fk5;point({:.4f} {:.4f}) # text={{{}}}\n".format(src['ra'],src['dec'],src['name']))
            ftex.write("{} & {:.5f} & {:.5f} & {:.2f} & {:.2f} $\\pm$ {:.2f} & \\\\\n".format(src['name'].replace('SRGe',''),
                                                                                              src['ra'],src['dec'],
                                                                                              src['det_like'],
                                                                                              src['flux']/flux_scale,
                                                                                              src['flux_error']/flux_scale))
    t.write(fout, format='fits')
    freg.close()
    ftex.close()
|
|
|
|
def do_adapt_ciao(infile=None,outfile=None,expmap=None,function='tophat',expcut=None):
    """Adaptively smooth an image with CIAO ``dmimgadapt`` in two passes.

    Pass 1 computes the smoothing-scale map (radfile) from the counts
    image alone; pass 2 re-runs dmimgadapt with that scale map
    (``inradfile``) and the exposure map as normalization, overwriting
    *outfile*. If *expcut* is given, output pixels whose exposure is
    below it are zeroed afterwards.

    :param infile: input counts image
    :param outfile: smoothed output image (overwritten by the second pass)
    :param expmap: exposure map used for normalization / the expcut mask
    :param function: dmimgadapt kernel ('tophat', 'gaussian', ...)
    :param expcut: optional exposure threshold for masking the result
    """
    if not (infile and expmap and outfile):
        print("ERROR: Please provide input and output files")
        return

    radfile=infile.replace(".fits", ".adapt.scale.fits")
    sumfile=infile.replace(".fits", ".sum.scale.fits")
    test_exe('dmimgadapt')
    # Pass 1: derive the adaptive smoothing scales (writes radfile).
    cmd=["dmimgadapt",
         "infile={}".format(infile),
         "outfile={}".format(outfile),
         "function={}".format(function),
         "minrad=1",
         "maxrad=30",
         "numrad=30",
         "radscale=log",
         "counts=30",
         "radfile={}".format(radfile),
         "sumfile={}".format(sumfile),
         "clobber=yes",]
    remove_file(outfile)
    print((" ").join(cmd))
    os.system((" ").join(cmd))

    # Pass 2: reuse the scale map (inradfile) with exposure normalization.
    cmd=["dmimgadapt",
         "infile={}".format(infile),
         "innormfile={}".format(expmap),
         "outfile={}".format(outfile),
         "function={}".format(function),
         "minrad=1",
         "maxrad=30",
         "numrad=30",
         "radscale=log",
         "counts=30",
         "inradfile={}".format(radfile),
         "sumfile={}".format(sumfile),
         "clobber=yes",]
    remove_file(outfile)
    print((" ").join(cmd))
    os.system((" ").join(cmd))

    if (expcut):
        # Zero out pixels with exposure below the requested cut.
        cntmap_data, cntmap_hdr = fits.getdata(outfile, ext=0, header=True)
        expmap_data, expmap_hdr = fits.getdata(expmap, ext=0, header=True)
        index = np.where(expmap_data < expcut)
        cntmap_data[index]=0.0
        fits.writeto(outfile, cntmap_data, cntmap_hdr, overwrite=True)
|
|
|
|
|
|
"""
|
|
dmimgadapt artmap_ait.fits adapt.fits tophat 1 30 30 log 30 radfile=map.scale.fits sumfile=sum.fits clobber=yes
|
|
#dmimgadapt artmap_ait.fits adapt.fits func=gaussian min=1.0 max=30 num=30 rad=linear counts=30 sumfile=sum.fits normfile=norm.fits radfile=scale.fits
|
|
"""
|
|
def read_tstart_tstop(infile=None):
    """
    Return (tstart, tstop) taken from the first and last event TIME values;
    evtool does not appear to set the TSTART/TSTOP header keywords
    correctly, so the times are read from the event list itself.
    """
    if not infile:
        print("ERROR: Please provide input file: infile=")
        return
    #print("Reading {}".format(infile))
    events = fits.open(infile)[1].data
    times = events['TIME']
    #print("*** TSTART diff {:.1f} ks".format((tstart-hdul[1].header['TSTART'])/1000))
    #print("*** TSTOP diff {:.1f} ks".format((hdul[1].header['TSTOP']-tstop)/1000))
    return times[0], times[-1]
|
|
|
|
def make_rate_map(cntmap=None, expmap=None, outfile=None):
    """Write a count-rate map: counts / exposure, zero where exposure is zero.

    :param cntmap: counts image FITS file
    :param expmap: exposure map FITS file (defines the output shape)
    :param outfile: output rate image (overwritten if present)
    """
    if not (cntmap and expmap and outfile):
        print("ERROR: Please provide counts, exposure ane output file")
        return

    counts, _counts_hdr = fits.getdata(cntmap, ext=0, header=True)
    exposure, _exposure_hdr = fits.getdata(expmap, ext=0, header=True)

    rate = np.zeros(exposure.shape)
    # divide only where exposure is positive to avoid division by zero
    exposed = np.where(exposure > 0.0)
    rate[exposed] = counts[exposed] / exposure[exposed]
    fits.writeto(outfile, rate, overwrite=True)
|
|
|
|
def wcs_astro_corr(catalog=None):
    """Run the astrometric correction for *catalog* against its shu2019
    cross-match products.

    Requires the ``*.shu2019.src.fits`` and ``*.shu2019.ref.fits`` files
    produced by ``crossmatch_shu2019``; exits if either is missing, then
    delegates to ``do_astro_corr`` (from uds.astrometry).

    Fixes: removed the unused locals ``xfm`` and ``log`` (the derived
    filenames were computed but never used in this function).
    """
    src_list = catalog.replace(".fits", ".shu2019.src.fits")
    if not os.path.isfile(src_list):
        print("Not found {}".format(src_list))
        print("Did you run cross-match?")
        sys.exit()

    ref_list = catalog.replace(".fits", ".shu2019.ref.fits")
    if not os.path.isfile(ref_list):
        print("Not found {}".format(ref_list))
        print("Did you run cross-match?")
        sys.exit()

    #do_astro_corr_rota(src=src_list, ref=ref_list, catalog=catalog)
    do_astro_corr(src=src_list, ref=ref_list, catalog=catalog)
|
|
|
|
def wcs_match_ciao(catalog=None, method='trans', radius=15, residlim=7, residfac=2.0, residtype=0):
    """Run CIAO ``wcs_match`` against the shu2019 source/reference lists.

    Derives the input/output filenames from *catalog* (the transform goes
    to ``*.xfm``, the log to ``*.xfm.log``) and exits if either
    cross-match product is missing.
    """
    xfm = catalog.replace(".fits", ".xfm")
    log = catalog.replace(".fits", ".xfm.log")

    src_list = catalog.replace(".fits", ".shu2019.src.fits")
    ref_list = catalog.replace(".fits", ".shu2019.ref.fits")
    # src_list is checked first, then ref_list — same order as before.
    for required in (src_list, ref_list):
        if not os.path.isfile(required):
            print("Not found {}".format(required))
            print("Did you run cross-match?")
            sys.exit()

    cmd = ["wcs_match",
           "infile={}".format(src_list),
           "refsrcfile={}".format(ref_list),
           "outfile={}".format(xfm),
           "wcsfile={}".format(src_list),
           "logfile={}".format(log),
           "radius={}".format(radius),
           "residlim={}".format(residlim),
           "residfac={}".format(residfac),
           "residtype={}".format(residtype),
           "verbose=1",
           "method={}".format(method),
           "clobber=yes",
           ]
    test_exe('wcs_match')
    # note: the command is executed before being echoed (original order kept)
    os.system((" ").join(cmd))
    print((" ").join(cmd))
|
|
|
|
def wcs_update_ciao(events,ext=2,crval=None,transformfile=None,clean=True):
    """ Prepares eRosita attitude file for Chandra's wcs_update command.

    Workflow:
      1. Copy the CORRATT extension of *events* into a Chandra-style
         ASPSOL table (with an empty q_att column) and run CIAO
         ``wcs_update`` on it with *transformfile*.
      2. Copy *events* to ``*.attcorr.fits`` and replace its attitude
         extension (HDU *ext*) with the corrected table.
      3. Run eSASS ``evatt`` on the copy to recompute event RA/Dec from
         the corrected attitude.

    :param events: input event file (attitude in HDU *ext*)
    :param ext: index of the CORRATT extension, default 2
    :param crval: optional (ra, dec) written as nominal pointing keywords
    :param transformfile: transform produced by wcs_match
    :param clean: remove the intermediate .aspsol/.tmp files when done
    :return: path of the attitude-corrected event file
    """

    if not (transformfile):
        print("ERROR: Please provide transformfile")
        return

    if not (os.path.isfile(transformfile)==True):
        print("Not found {}".format(transformfile))
        print("Did you run wcs_update_ciao?")
        sys.exit()

    fnaspsol=events.replace(".fits", ".aspsol")
    fntmp=events.replace(".fits", ".tmp")
    fnattcor=events.replace(".fits", ".attcorr.fits")
    hdul = fits.open(events,mode='readonly')
    corratt=hdul[ext]
    naxis1=corratt.header['NAXIS1']
    naxis2=corratt.header['NAXIS2']
    corratt_extname=corratt.header['EXTNAME']

    """ create new empty Q_ATT column """
    # wcs_update expects a quaternion column; filled with zeros here.
    q_att=np.zeros((naxis2,4))
    cols = []
    cols.append(fits.Column(name='q_att', format='4D', array=q_att))

    orig_cols = corratt.columns
    new_cols = fits.ColDefs(cols)

    hdu0 = fits.PrimaryHDU()

    hdu0.header['EXTNAME'] = ('ASPSOL', 'Name of this binary table extension')
    hdu0.header['HDUNAME'] = ('ASPSOL', 'ASCDM block name')

    if(crval):
        # Nominal/actual pointing keywords expected by CIAO tools.
        hdu0.header['RA_NOM']=(crval[0], '[deg] Pointing RA')
        hdu0.header['DEC_NOM']=(crval[1], '[deg] Pointing Dec')
        hdu0.header['ROLL_NOM']=(0.0 , '[deg] Pointing Roll')

        hdu0.header['RA_PNT']=(crval[0], '[deg] Nominal RA')
        hdu0.header['DEC_PNT']=(crval[1], '[deg] Nominal Dec')
        hdu0.header['ROLL_PNT']=(0.0 , '[deg] Nominal Roll')

    hdu = fits.BinTableHDU.from_columns(orig_cols + new_cols, header=hdu0.header)
    hdu.writeto(fnaspsol, overwrite=True)

    """
    First, update attitude information in ASPSOL (Chandra notation),
    which corresponds to CORRATT (eRosita notation)
    """
    cmd=["wcs_update",
         "infile={}".format(fnaspsol),
         "outfile={}".format(fntmp),
         "transformfile={}".format(transformfile),
         "clobber=yes",
         ]
    os.system((" ").join(cmd))
    print((" ").join(cmd))

    """
    Replace attitude extension
    """
    shutil.copyfile(events, fnattcor)
    data, hdr = getdata(fntmp, 1, header=True)
    # Restore the original eROSITA extension name (e.g. CORRATT).
    hdr['EXTNAME']=corratt_extname
    update(fnattcor, data, ext, header=hdr)

    """
    Second, calculate new RA/Dec for events using updated attitude, using evatt from eSASS
    """
    cmd=["evatt",
         "EVENTFILE={}".format(fnattcor),
         "ATTFILE={}".format(fnattcor),
         "GTIFILE={}".format(fnattcor),
         ]
    os.system((" ").join(cmd))
    print((" ").join(cmd))

    if(clean==True):
        remove_file(fnaspsol)
        remove_file(fntmp)
    return fnattcor
|
|
|
|
def wcs_update_shift(events,flog=None,ext=2,crval=None,clean=True):
    """Apply a simple RA/Dec shift (from a do_astro_corr log) to an event file.

    Reads the shift in arcsec from columns 3 and 4 of the first line of
    *flog*, subtracts it from the RA/DEC columns of attitude extension
    *ext* in a copy of *events* (``*.attcorr.fits``), records the shift
    in COR_RA/COR_DEC/COR_FILE header keywords, then runs eSASS ``evatt``
    to recompute event coordinates.

    :param events: input event file
    :param flog: log file written by do_astro_corr
    :param ext: attitude extension index, default 2
    :param crval: unused here (kept for signature symmetry with wcs_update_ciao)
    :param clean: unused here — no intermediate files are created
    :return: path of the attitude-corrected event file
    """
    test_exe('evatt')
    fnattcor=events.replace(".fits", ".attcorr.fits")

    if not (os.path.isfile(flog)==True):
        print("Not found {}".format(flog))
        print("Did you run do_astro_corr?")
        sys.exit()

    with open(flog) as f:
        first_line = f.readline().strip('\n')
    print(first_line)
    # Expected format: whitespace-separated, shift values in fields 3-4
    # (arcsec) — presumably "<key> <n> <dRA> <dDec> ..."; confirm with
    # do_astro_corr output.
    ar=first_line.split()
    shift_ra=float(ar[2])
    shift_dec=float(ar[3])
    print("Apply shift: RA={}'' Dec={}''".format(shift_ra, shift_dec))

    shutil.copyfile(events, fnattcor)
    data, hdr = getdata(fnattcor, ext, header=True)
    # Record the applied correction in the header for provenance.
    hdr['COR_RA']=ar[2]
    hdr['COR_DEC']=ar[3]
    hdr['COR_FILE']=os.path.split(flog)[1]
    data['RA'] -= shift_ra/3600.0
    data['DEC'] -= shift_dec/3600.0
    update(fnattcor, data, ext, header=hdr)

    """
    Calculate new RA/Dec for events using updated attitude, using evatt from eSASS
    """

    cmd=["evatt",
         "EVENTFILE={}".format(fnattcor),
         "ATTFILE={}".format(fnattcor),
         "GTIFILE={}".format(fnattcor),
         ]

    print((" ").join(cmd))
    os.system((" ").join(cmd))
    return(fnattcor)
|
|
|
|
def create_detmask_merged(expmaps,outfile,minval=None):
    """
    Build a merged detection mask from a list of exposure maps.

    Each exposure map is thresholded into a 0/1 mask (pixels above `minval`,
    or above zero when `minval` is None), the masks are summed, and the sum
    is re-binarized and written to `outfile`.  The output header is taken
    from the first exposure map, with INSTRUME set to 'merged', NINST set to
    the number of distinct instruments, and the contributing instruments
    listed in INSTRUM1..N.  Nothing is written when `expmaps` is empty.
    """
    tmlist={}
    merged_mask = None
    merged_hdr = None
    # NOTE: minval=0 falls back to threshold 0 anyway, so truthiness is fine.
    threshold = minval if(minval) else 0
    for expmap in expmaps:
        # Context manager closes each FITS file promptly (the original
        # leaked one open handle per input map).
        with fits.open(expmap) as hdul:
            ehdr = hdul[0].header
            # np.where materializes a fresh array, safe to use after close.
            mask = np.where(hdul[0].data > threshold, 1, 0)
        instrume = ehdr['INSTRUME']

        print("--- detmask {} -- {}".format(expmap,instrume))
        # Remember each contributing instrument once (dict preserves order).
        if instrume not in tmlist:
            tmlist[instrume]=1

        if merged_mask is None:
            # First map: seed the accumulator and keep its header.
            merged_mask = mask
            merged_hdr = ehdr
        else:
            merged_mask = np.add(merged_mask, mask)

    if merged_hdr is not None:
        for index, tm in enumerate(tmlist, start=1):
            merged_hdr["INSTRUM{}".format(index)]=tm
        merged_hdr['INSTRUME']='merged'
        merged_hdr['NINST']=len(tmlist)
        merged_hdr['OBS_MODE']=' '
    if merged_mask is not None and merged_hdr is not None:
        fits.writeto(outfile, np.where(merged_mask > 0, 1, 0), header=merged_hdr, overwrite=True)
|
|
|
|
def create_expmap_merged(expmaps,outfile,scale=7.0):
    """
    Sum a list of per-telescope exposure maps into one merged exposure map.

    Every input map is divided by `scale` before being accumulated.  The
    output header is taken from the first exposure file, with
    INSTRUME='merged', NINST set to the number of distinct instruments, and
    the contributors recorded in INSTRUM1..N.  Nothing is written when
    `expmaps` is empty.
    """
    tmlist={}
    merged_map = None
    merged_hdr = None
    for expmap in expmaps:
        # Context manager closes each FITS file promptly (the original
        # leaked one open handle per input map).
        with fits.open(expmap) as hdul:
            # np.divide materializes a fresh array, safe to use after close.
            emap = np.divide(hdul[0].data, scale)
            ehdr = hdul[0].header
        instrume = ehdr['INSTRUME']

        print("--- expmap {} -- {}".format(expmap,instrume))
        # Remember each contributing instrument once (dict preserves order).
        if instrume not in tmlist:
            tmlist[instrume]=1

        if merged_map is None:
            # First map: seed the accumulator and keep its header.
            merged_map = emap
            merged_hdr = ehdr
        else:
            merged_map = np.add(merged_map, emap)

    if merged_hdr is not None:
        for index, tm in enumerate(tmlist, start=1):
            merged_hdr["INSTRUM{}".format(index)]=tm
        merged_hdr['INSTRUME']='merged'
        merged_hdr['NINST']=len(tmlist)
        merged_hdr['OBS_MODE']=' '
    if merged_map is not None and merged_hdr is not None:
        fits.writeto(outfile, merged_map, header=merged_hdr, overwrite=True)
|
|
|
|
def do_erbackmap_esass(image,expimage,boxlist,detmask,emin,emax,outfile_backmap,cheese_mask):
    """
    Run the eSASS 'erbackmap' task to produce a background map and cheese mask.

    Stale output files are removed beforehand, then the task is launched via
    the shell and the executed command line is echoed.
    """
    test_exe('erbackmap')
    arguments = [
        "image={}".format(image),
        "expimage={}".format(expimage),
        "boxlist={}".format(boxlist),
        "detmask={}".format(detmask),
        "emin={}".format(emin),
        "emax={}".format(emax),
        "bkgimage={}".format(outfile_backmap),
        "cheesemask={}".format(cheese_mask),
        "idband=1",
        "scut=0.001",
        "mlmin=6",
        "maxcut=0.5",
        "fitmethod=smooth smoothval=15",
        "snr=40.",
    ]
    command = " ".join(["erbackmap"] + arguments)

    # Clear previous outputs so the task does not refuse to overwrite them.
    remove_file(cheese_mask)
    remove_file(outfile_backmap)
    os.system(command)
    print(command)
|
|
|
|
def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0,outkey='main'):
    """
    Split a catprep FITS catalog into DS9 region files.

    Sources with dlmin < DET_LIKE_0 < dlmax are written to
    "<name>.<outkey>.selected.reg" (white circles), the rest to
    "<name>.<outkey>.skip.reg" (red), and sources whose EXT_LIKE exceeds
    `ext_like` to "<name>.extended.reg" (magenta).  Extended sources are
    also printed as LaTeX table rows.  Circle radii are converted from
    arcsec to degrees via `scale`.

    Note: `expcut` is accepted for signature compatibility but unused here.
    """
    if not os.path.isfile(filename):
        print("File not found {}".format(filename))
        return  # bail out early instead of crashing in fits.open below
    print("Filter catprep {}".format(filename))
    fout_selected=filename.replace(".fits", ".{}.selected.reg".format(outkey))
    fout_skip=filename.replace(".fits", ".{}.skip.reg".format(outkey))
    fout_extended=filename.replace(".fits", ".extended.reg")

    hdul = fits.open(filename)
    tbdata = hdul[1].data
    hdul.close()

    catsel=[]
    catskip=[]
    catext=[]
    skip_count=0
    selected_count=0
    print("Print of extended sources:")
    for rec in tbdata:
        if (rec['ext_like'] > ext_like):
            catext.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],
                           'det_like':rec['det_like_0'],'ext_like':rec['ext_like'],
                           'ext':rec['ext'],'ext_err':rec['ext_err'],
                           'src_id':rec['id_src']})
            # "\\pm" and "\\\\" render as the LaTeX tokens \pm and \\ ; the
            # original relied on the deprecated unescaped "\p" sequence.
            print("{:.2f} {} {} & {:.4f} & {:.4f} & {:.2f} & {:.2f} & {:.2f} $\\pm$ {:.2f} \\\\".format(rec['ra'],rec['id_src'],
                                                                                                        make_source_name(rec['ra'], rec['dec']), rec['ra'], rec['dec'],
                                                                                                        rec['det_like_0'], rec['ext_like'],
                                                                                                        rec['ext'], rec['ext_err']))

        if ((rec['det_like_0'] > dlmin and rec['det_like_0'] < dlmax)):
            catsel.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],'det_like':rec['det_like_0'],
                           'src_id':rec['id_src']})
            selected_count=selected_count + 1
        else:
            catskip.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],'det_like':rec['det_like_0'],
                           'src_id':rec['id_src']})
            skip_count = skip_count+1

    print("total={} skip_count={} selected={}".format(len(tbdata),skip_count,selected_count))
    with open(fout_selected, 'w') as writer:
        for rec in catsel:
            # Fall back to a 15'' radius when the positional error is NaN.
            radius = rec['radec_err']/scale if not (np.isnan(rec['radec_err'])) else 15/scale
            writer.write("fk5;circle({}, {}, {}) # color=white text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],radius,
                                                                                            rec['src_id'],rec['det_like']))

    with open(fout_skip, 'w') as writer:
        for rec in catskip:
            writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,
                                                                                          rec['src_id'],rec['det_like']))

    with open(fout_extended, 'w') as writer:
        for rec in catext:
            writer.write("fk5;circle({}, {}, {}) # color=magenta text={{{} dl:{:.1f} el:{:.1f}}}\n".format(rec['ra'],
                                                                                                           rec['dec'],
                                                                                                           rec['ext']/scale,
                                                                                                           rec['src_id'],
                                                                                                           rec['det_like'],
                                                                                                           rec['ext_like']))
|
|
def filter_mllist(filename, expcut=100,dlcut=6.0,dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0):
    """
    Filter an ermldet mllist catalog into DS9 region files.

    For each unique source the merged (id_inst=0, id_band=0) record is
    tested against the DET_LIKE window [dlmin, dlmax]; sources outside the
    window can still be "kept alive" when at least one individual-telescope
    record (id_inst != 0, id_band=1, ml_exp > expcut) exceeds dlcut.
    Selected sources go to "<name>.selected.reg", rejected ones to
    "<name>.skip.reg", extended ones (ext_like above threshold) to
    "<name>.extended.reg".  Radii are converted from arcsec to degrees
    via `scale`.
    """
    # NOTE(review): a missing file is only reported; fits.open below will
    # still raise.
    if(os.path.isfile(filename)==False):
        print("File not found {}".format(filename))
    print("Filter mllist {}".format(filename))
    fout_selected=filename.replace(".fits", ".selected.reg")
    fout_skip=filename.replace(".fits", ".skip.reg")
    fout_extended=filename.replace(".fits", ".extended.reg")

    hdul = fits.open(filename)
    tbdata = hdul[1].data

    """ create list of unique source IDs """
    id_src_set = set(tbdata['id_src'])
    src_id_list = list(id_src_set)
    catsel=[]
    catskip=[]
    catext=[]
    skip_count=0
    selected_count=0
    keepalive_count=0
    for src_id in src_id_list:

        tab_tm0=tbdata[(tbdata['id_src'] == src_id) & (tbdata['id_inst'] == 0) & (tbdata['id_band'] == 0)]
        """ averaged over all instruments and bands """

        ext_tm0=tbdata[(tbdata['id_src'] == src_id) & (tbdata['id_inst'] == 0) & (tbdata['id_band'] == 0) & (tbdata['ext_like'] > ext_like)]
        """ averaged over all instruments and bands """
        if(len(ext_tm0)>0):
            # Extended source: record the merged-row parameters.
            catext.append({'ra':ext_tm0['ra'][0],'dec':ext_tm0['dec'][0],'radec_err':ext_tm0['radec_err'][0],
                           'det_like':ext_tm0['det_like'][0],'ext_like':ext_tm0['ext_like'][0],
                           'ext':ext_tm0['ext'][0],'ext_err':ext_tm0['ext_err'][0],
                           'src_id':ext_tm0['id_src'][0]})

        # NOTE(review): mask_src_id is computed but never used below.
        mask_src_id = (tbdata['id_src'] == src_id) & (tbdata['id_inst'] == 0)
        """ get all source records except merged one """

        tab_src=tbdata[(tbdata['id_src'] == src_id) & (tbdata['id_inst'] != 0) & (tbdata['id_band'] == 1) & (tbdata['ml_exp'] > expcut)]
        """ only individual instruments, and only first energy band is selected here """

        keepalive=False
        mask = tab_src['det_like'] > dlcut
        # NOTE(review): `tab_tm0['det_like'] < dlmin` is a numpy array used
        # in a boolean context — this only works when tab_tm0 has exactly
        # one row (one merged record per source); with 0 or >1 rows numpy
        # raises. Confirm the catalog guarantees a single TM0 row.
        if (any(mask) and tab_tm0['det_like'] < dlmin):
            keepalive=True
            keepalive_count=keepalive_count+1

        if(keepalive):
            # Re-fetch per-instrument records without the exposure cut so
            # the diagnostic print shows the full record.
            tab_src=tbdata[(tbdata['id_src'] == src_id) & (tbdata['id_inst'] != 0) & (tbdata['id_band'] == 1)]
            print("KEEP ALIVE ID={} DL={:.2f} | radec: {:.4f} {:.4f} DL0={:.2f} TEXP={:.1f}".format(src_id, tab_tm0['det_like'][0], tab_tm0['ra'][0], tab_tm0['dec'][0],
                                                                                                    tab_src['det_like'][0],tab_src['ml_exp'][0]))

        # Same single-row-array caveat as above applies to this comparison.
        if ((tab_tm0['det_like'] > dlmin and tab_tm0['det_like'] < dlmax) or keepalive):
            catsel.append({'ra':tab_tm0['ra'][0],'dec':tab_tm0['dec'][0],'radec_err':tab_tm0['radec_err'][0],'det_like':tab_tm0['det_like'][0],
                           'src_id':tab_tm0['id_src'][0]})
            selected_count=selected_count + 1
        else:
            catskip.append({'ra':tab_tm0['ra'][0],'dec':tab_tm0['dec'][0],'radec_err':tab_tm0['radec_err'][0],'det_like':tab_tm0['det_like'][0],
                           'src_id':tab_tm0['id_src'][0]})
            skip_count = skip_count+1

    print("total={} skip_count={} keepalive_count={} selected={}".format(len(src_id_list),skip_count,keepalive_count,selected_count))
    with open(fout_selected, 'w') as writer:
        for rec in catsel:
            writer.write("fk5;circle({}, {}, {}) # color=white text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,
                                                                                            rec['src_id'],rec['det_like']))

    with open(fout_skip, 'w') as writer:
        for rec in catskip:
            writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,
                                                                                          rec['src_id'],rec['det_like']))

    with open(fout_extended, 'w') as writer:
        for rec in catext:
            writer.write("fk5;circle({}, {}, {}) # color=magenta text={{{} {:.2f} {:.2f}}}\n".format(rec['ra'],rec['dec'],rec['ext']/scale,
                                                                                                     rec['src_id'],rec['det_like'],rec['ext_like']))
|
|
|
|
|
|
def create_sensmap(sensmap=None,expmap=None,backmap=None,detmask=None,areatab=None,
                   emin=None,emax=None,ecf=None, detlike=10):
    """
    Build a sensitivity map (and survey area table) with the eSASS
    'ersensmap' task.

    The detection threshold is set by `detlike`; aperture photometry uses a
    BOX of size 4.5.  Existing output files are removed first, then the
    task is run via the shell and the command line echoed.
    """
    test_exe('ersensmap')
    settings = [
        "expimages='{}'".format(expmap),
        "bkgimages='{}'".format(backmap),
        "detmasks='{}'".format(detmask),
        "sensimage={}".format(sensmap),
        "emin='{}'".format(emin),
        "emax='{}'".format(emax),
        "ecf='{}'".format(ecf),
        "area_table={}".format(areatab),
        "method=APER",
        "aper_type=BOX",
        "aper_size=4.5",
        "likemin={}".format(detlike),
        "extlikemin=6.0",
        "ext=6.0",
        "extentmodel=beta",
        "detmask_flag=yes",
        "shapelet_flag=no",
        "photon_flag=no",
        "ext_flag=no",
        "extlike_flag=no",
        "compress_flag=no",
        "area_flag=yes",
    ]
    command = " ".join(["ersensmap"] + settings)
    remove_file(sensmap)
    remove_file(areatab)
    os.system(command)
    print(command)
|
|
|
|
def group_spectra(path,group_min=25):
    """
    Group all PHA spectra matching `path` (a glob pattern) with HEASOFT's
    grppha.

    Each "<file>.fits" is rebinned to at least `group_min` counts per bin
    and written as "<file>.fits.grp".
    """
    test_exe('grppha')
    for spectrum in glob.glob(path):
        grouped = spectrum.replace(".fits", ".fits.grp")
        print(spectrum)
        command = " ".join([
            "grppha",
            "infile={}".format(spectrum),
            "outfile={}".format(grouped),
            "comm='GROUP MIN {}'".format(group_min),
            "tempc=EXIT",
            "clobber=yes",
            # with chatter <= 5 being very quite and chatter >= 20 very verbose.
            "chatter=0",
        ])
        print(command)
        os.system(command)
|
|
|
|
def check_ermldet_forced(infile):
    """
    Verify that a forced-mode ermldet catalog contains only unique sources.

    In forced mode every row must keep its input box ID, i.e.
    ID_SRC == BOX_ID_SRC.  Returns True when the whole catalog is
    consistent, False (with diagnostics printed) at the first mismatch.
    """
    if not os.path.isfile(infile):
        print("File not found {}".format(infile))

    print("reading mllist {}".format(infile))

    hdul = fits.open(infile)
    rows = hdul[1].data
    hdul.close()

    for row in rows:
        source_id = row['ID_SRC']
        box_id = row['BOX_ID_SRC']
        if source_id == box_id:
            continue
        print("The ermldet catalog in forced mode should contain only unique sources.")
        print("Use fitpos_flag=no, fitext_flag=no, nmulsou=1")
        print("{} != {}".format(source_id, box_id))
        return False
    return True
|
|
|
|
def correct_srcid_ermldet_forced(infile):
    """
    Restore the input source ordering in a forced ermldet catalog.

    ermldet does not preserve the source order, so ID_SRC is overwritten
    in place with BOX_ID_SRC (the forced input ID).
    """
    if not os.path.isfile(infile):
        print("File not found {}".format(infile))

    print("reading mllist {}".format(infile))

    hdul = fits.open(infile,'update')
    table = hdul[1].data
    table['ID_SRC'] = table['BOX_ID_SRC']
    hdul.close()
|
|
|
|
|
|
def correct_ermldet_column(infile, colname):
    """
    Copy every third value of `colname` onto the two rows preceding it.

    The file is modified in place (mode 'update'). NaN values are reported
    with a warning and left untouched.
    NOTE(review): presumably ermldet writes the valid value only on every
    third row of this column — confirm against the ermldet output layout.
    """
    step = 3
    hdul = fits.open(infile, 'update')
    column = hdul[1].data[colname]
    # Positions 2, 5, 8, ... are exactly those where (idx+1) % 3 == 0.
    for pos in range(step - 1, len(column), step):
        value = column[pos]
        if np.isnan(value):
            print("WARNING! {}: {} {}".format(colname, pos, value))
            continue
        column[pos - 1] = value
        column[pos - 2] = value
    hdul.close()
|
|
|
|
|
|
def correct_fluxerr_ermldet_forced(infile):
    """
    Fill in the upper/lower error columns of a forced ermldet catalog.

    Delegates to correct_ermldet_column for every rate/flux/counts error
    column, in the same order as before.
    """
    if not os.path.isfile(infile):
        print("File not found {}".format(infile))

    print("reading mllist {}".format(infile))

    for column in ('ML_RATE_UPERR', 'ML_RATE_LOWERR',
                   'ML_FLUX_LOWERR', 'ML_FLUX_UPERR',
                   'ML_CTS_LOWERR', 'ML_CTS_UPERR'):
        correct_ermldet_column(infile, column)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def read_forced_catalog(infile,sensmap=None):
    """
    Read a forced ermldet catalog into a dict keyed by detUID.

    Each entry collects the source position, detection likelihood,
    counts/rate/flux values with their errors, extent parameters and
    aperture photometry.  When `sensmap` is given, the sensitivity-map
    value at each source position is stored in the 'sens' field
    (otherwise 'sens' stays 0.0).
    """
    if(os.path.isfile(infile)==False):
        print("File not found {}".format(infile))

    if(sensmap):
        if(os.path.isfile(sensmap)==False):
            print("File not found {}".format(sensmap))

    cat={}
    hdul = fits.open(infile)
    tab = hdul[1].data
    hdul.close()

    for rec in tab:
        key = rec['detUID']
        cat[key]={'ra':rec['RA'], 'dec':rec['DEC'],
                  'radec_err':rec['RADEC_ERR'],
                  'det_like':rec['DET_LIKE_0'],

                  'ml_cts':rec['ML_CTS_0'],
                  'ml_cts_err':rec['ML_CTS_ERR_0'],
                  'ml_cts_lowerr':rec['ML_CTS_LOWERR_0'],
                  'ml_cts_uperr':rec['ML_CTS_UPERR_0'],

                  'ml_bkg':rec['ML_BKG_0'],
                  'ml_exp':rec['ML_EXP_1'],

                  'ml_rate':rec['ML_RATE_0'],
                  'ml_rate_err':rec['ML_RATE_ERR_0'],
                  'ml_rate_lowerr':rec['ML_RATE_LOWERR_0'],
                  'ml_rate_uperr':rec['ML_RATE_UPERR_0'],

                  'ml_flux':rec['ML_FLUX_0'],
                  'ml_flux_err':rec['ML_FLUX_ERR_0'],
                  'ml_flux_lowerr':rec['ML_FLUX_LOWERR_0'],
                  'ml_flux_uperr':rec['ML_FLUX_UPERR_0'],

                  'ext':rec['EXT'],
                  'ext_err':rec['EXT_ERR'],
                  'ext_lowerr':rec['EXT_LOWERR'],
                  'ext_uperr':rec['EXT_UPERR'],
                  'ext_like':rec['EXT_LIKE'],

                  'ape_cts':rec['APE_CTS_1'],
                  'ape_bkg':rec['APE_BKG_1'],
                  'ape_exp':rec['APE_EXP_1'],
                  'ape_radius':rec['APE_RADIUS_1'],
                  'ape_pois':rec['APE_POIS_1'],

                  'sens':0.0}

    print("Reading forced catalog {} nrows={}".format(infile,len(tab)))

    if(sensmap):
        print("reading sensmap {}".format(sensmap))
        hdul = fits.open(sensmap)
        sens = hdul[0].data
        hdr = hdul[0].header
        hdul.close()
        # Named sens_wcs (not 'wcs') to avoid shadowing the module-level
        # astropy.wcs import.
        sens_wcs = WCS(hdr)
        for key in cat.keys():
            ra=cat[key]['ra']
            dec=cat[key]['dec']
            # The original also built an unused SkyCoord per source here;
            # dropped as dead code.
            # origin=1: wcs_world2pix works in 1-based FITS pixel
            # coordinates, hence the -1 to index the numpy array.
            pix = sens_wcs.wcs_world2pix([(ra, dec),], 1)
            px=round(pix[0][0])-1
            py=round(pix[0][1])-1
            cat[key]['sens']=sens[py,px]

    return cat
|
|
|
|
def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0,outkey='main',
                      emin=None, emax=None, eband=None, #ecf=1.0,
                      infile_en00cat=None,
                      infile_en01cat=None,
                      infile_en02cat=None,
                      infile_en03cat=None,
                      infile_en06cat=None,
                      infile_en00sens=None,
                      infile_en01sens=None,
                      infile_en02sens=None,
                      infile_en03sens=None,
                      infile_en06sens=None,
                      srcs_forced=None):
    """
    Merge the main (en0) detection catalog with the per-band forced
    catalogs into a single source list, written as CSV and as a pickle.

    infile         : main ermldet catalog (en0 band).
    rawcat         : output pickle file holding the selected-source dicts.
    dlmin, dlmax   : DET_LIKE selection window.
    outkey         : tag inserted into the output CSV filename.
    infile_enNNcat / infile_enNNsens : per-band forced catalogs and their
                     sensitivity maps (NN = 00, 01, 02, 03, 06).
    srcs_forced    : dict of externally forced (XMM) sources; element [3]
                     of each value is the integer source ID matched against
                     the numeric part of detUID.
    scale, ext_like, emin, emax, eband : accepted but not used here.
    """
    # NOTE(review): a missing infile is only reported; fits.open below
    # will still raise.
    if(os.path.isfile(infile)==False):
        print("File not found {}".format(infile))

    print("reading infile {}".format(infile))
    fout_selected=infile.replace(".fits", ".{}.selected.csv".format(outkey))
    remove_file(fout_selected)
    # NOTE(review): fout_skip and fout_extended are computed but never
    # written by this routine.
    fout_skip=infile.replace(".fits", ".{}.skip.reg".format(outkey))
    fout_extended=infile.replace(".fits", ".extended.reg")

    """ read forced catalogs and corresponding sensitivity maps """
    en00cat=read_forced_catalog(infile_en00cat,sensmap=infile_en00sens)
    en01cat=read_forced_catalog(infile_en01cat,sensmap=infile_en01sens)
    en02cat=read_forced_catalog(infile_en02cat,sensmap=infile_en02sens)
    en03cat=read_forced_catalog(infile_en03cat,sensmap=infile_en03sens)
    en06cat=read_forced_catalog(infile_en06cat,sensmap=infile_en06sens)

    hdul = fits.open(infile)
    tbdata = hdul[1].data
    hdul.close()

    catsel=[]
    # NOTE(review): catskip/catext and skip_count/keepalive_count are
    # initialized but never filled here (unlike filter_mllist).
    catskip=[]
    catext=[]
    skip_count=0
    selected_count=0
    keepalive_count=0
    for rec in tbdata:
        """ Go through forced en0 catalog """

        src_crd = SkyCoord(rec['ra'], rec['dec'], frame=FK5(), unit="deg")
        key = rec['detUID']
        # Numeric part of the detUID ('ML<number>' -> <number>).
        ikey=int(key.replace('ML',''))

        # Consistency check: the same detUID must exist in the en01 forced
        # catalog at essentially the same sky position; abort otherwise.
        if not (key in en01cat.keys()):
            print("{} not found in {}".format(rec['detUID'],infile_en01cat))
            sys.exit()
        else:
            crd = SkyCoord(en01cat[key]['ra'], en01cat[key]['dec'], frame=FK5(), unit="deg")
            sep = src_crd.separation(crd).arcsec
            if(sep > 1.0):
                print("***")
                print("*** ERROR: {} significant offset: {:.4f} arcsec ***".format(key,sep))
                print("***")
                sys.exit()
            pass

        forced_name=None
        if (key in en00cat):
            """ take non-forced catalog """
            forced=False
            # Apply the DET_LIKE window on the non-forced detection.
            if not ((en00cat[key]['det_like'] > dlmin and en00cat[key]['det_like'] < dlmax)):
                continue
            pass
        else:
            """ take forced catalog (XMM-added sources) """
            forced=True
            if not ((rec['det_like_0'] > dlmin and rec['det_like_0'] < dlmax)):
                continue
            """ print name of the XMM source in the final catalog """
            forced_name=None
            # Reverse lookup: find the XMM source whose stored ID (index 3
            # of the value tuple) matches this detection's numeric ID.
            for sfkey in srcs_forced.keys():
                if(srcs_forced[sfkey][3] == ikey):
                    forced_name=sfkey

            pass

        """ extended information is not listed in forced catalog, take it from non-forced """

        # For forced (XMM) sources the extent fields default to 0.0 and the
        # per-band values come from `rec`; for normal sources they are
        # taken from the non-forced en00 catalog entry.
        catsel.append({'name':make_source_name(rec['ra'], rec['dec']),
                       'ra':rec['ra'],
                       'dec':rec['dec'],
                       'radec_err':rec['radec_err'] if (forced) else en00cat[key]['radec_err'],
                       'det_like':rec['det_like_0'] if (forced) else en00cat[key]['det_like'],

                       'ext': 0.0 if (forced) else en00cat[key]['ext'],
                       'ext_err': 0.0 if (forced) else en00cat[key]['ext_err'],
                       'ext_lowerr':0.0 if (forced) else en00cat[key]['ext_lowerr'],
                       'ext_uperr': 0.0 if (forced) else en00cat[key]['ext_uperr'],
                       'ext_like': 0.0 if (forced) else en00cat[key]['ext_like'],

                       'src_id':rec['id_src'],
                       'ml_bkg':rec['ml_bkg_0'],
                       'ml_exp':rec['ml_exp_1'],

                       'ml_cts': rec['ml_cts_0'] if (forced) else en00cat[key]['ml_cts'],
                       'ml_cts_err': rec['ml_cts_err_0'] if (forced) else en00cat[key]['ml_cts_err'],
                       'ml_cts_lowerr': rec['ml_cts_lowerr_0'] if (forced) else en00cat[key]['ml_cts_lowerr'],
                       'ml_cts_uperr': rec['ml_cts_uperr_0'] if (forced) else en00cat[key]['ml_cts_uperr'],

                       'ml_rate': rec['ml_rate_0'] if (forced) else en00cat[key]['ml_rate'],
                       'ml_rate_err': rec['ml_rate_err_0'] if (forced) else en00cat[key]['ml_rate_err'],
                       'ml_rate_lowerr': rec['ml_rate_lowerr_0'] if (forced) else en00cat[key]['ml_rate_lowerr'],
                       'ml_rate_uperr': rec['ml_rate_uperr_0'] if (forced) else en00cat[key]['ml_rate_uperr'],

                       'ml_flux': rec['ml_flux_0'] if (forced) else en00cat[key]['ml_flux'],
                       'ml_flux_err': rec['ml_flux_err_0'] if (forced) else en00cat[key]['ml_flux_err'],
                       'ml_flux_lowerr': rec['ml_flux_lowerr_0'] if (forced) else en00cat[key]['ml_flux_lowerr'],
                       'ml_flux_uperr': rec['ml_flux_uperr_0'] if (forced) else en00cat[key]['ml_flux_uperr'],

                       'ape_cts': rec['ape_cts_1'] if (forced) else en00cat[key]['ape_cts'],
                       'ape_bkg': rec['ape_bkg_1'] if (forced) else en00cat[key]['ape_bkg'],
                       'ape_exp': rec['ape_exp_1'] if (forced) else en00cat[key]['ape_exp'],
                       'ape_radius': rec['ape_radius_1'] if (forced) else en00cat[key]['ape_radius'],
                       'ape_pois': rec['ape_pois_1'] if (forced) else en00cat[key]['ape_pois'],

                       'en01_dl':en01cat[key]['det_like'] if(key in en01cat) else None,
                       'en02_dl':en02cat[key]['det_like'] if(key in en02cat) else None,
                       'en03_dl':en03cat[key]['det_like'] if(key in en03cat) else None,
                       'en06_dl':en06cat[key]['det_like'] if(key in en06cat) else None,

                       'en01_ml_rate':en01cat[key]['ml_rate'] if(key in en01cat) else None,
                       'en02_ml_rate':en02cat[key]['ml_rate'] if(key in en02cat) else None,
                       'en03_ml_rate':en03cat[key]['ml_rate'] if(key in en03cat) else None,
                       'en06_ml_rate':en06cat[key]['ml_rate'] if(key in en06cat) else None,

                       'en01_ml_rate_err':en01cat[key]['ml_rate_err'] if(key in en01cat) else None,
                       'en02_ml_rate_err':en02cat[key]['ml_rate_err'] if(key in en02cat) else None,
                       'en03_ml_rate_err':en03cat[key]['ml_rate_err'] if(key in en03cat) else None,
                       'en06_ml_rate_err':en06cat[key]['ml_rate_err'] if(key in en06cat) else None,

                       'en01_ml_rate_lowerr':en01cat[key]['ml_rate_lowerr'] if(key in en01cat) else None,
                       'en02_ml_rate_lowerr':en02cat[key]['ml_rate_lowerr'] if(key in en02cat) else None,
                       'en03_ml_rate_lowerr':en03cat[key]['ml_rate_lowerr'] if(key in en03cat) else None,
                       'en06_ml_rate_lowerr':en06cat[key]['ml_rate_lowerr'] if(key in en06cat) else None,

                       'en01_ml_rate_uperr':en01cat[key]['ml_rate_uperr'] if(key in en01cat) else None,
                       'en02_ml_rate_uperr':en02cat[key]['ml_rate_uperr'] if(key in en02cat) else None,
                       'en03_ml_rate_uperr':en03cat[key]['ml_rate_uperr'] if(key in en03cat) else None,
                       'en06_ml_rate_uperr':en06cat[key]['ml_rate_uperr'] if(key in en06cat) else None,

                       'en01_ml_exp':en01cat[key]['ml_exp'] if(key in en01cat) else None,
                       'en02_ml_exp':en02cat[key]['ml_exp'] if(key in en02cat) else None,
                       'en03_ml_exp':en03cat[key]['ml_exp'] if(key in en03cat) else None,
                       'en06_ml_exp':en06cat[key]['ml_exp'] if(key in en06cat) else None,

                       'en01_ml_bkg':en01cat[key]['ml_bkg'] if(key in en01cat) else None,
                       'en02_ml_bkg':en02cat[key]['ml_bkg'] if(key in en02cat) else None,
                       'en03_ml_bkg':en03cat[key]['ml_bkg'] if(key in en03cat) else None,
                       'en06_ml_bkg':en06cat[key]['ml_bkg'] if(key in en06cat) else None,

                       'en01_cts':en01cat[key]['ml_cts'] if(key in en01cat) else None,
                       'en02_cts':en02cat[key]['ml_cts'] if(key in en02cat) else None,
                       'en03_cts':en03cat[key]['ml_cts'] if(key in en03cat) else None,
                       'en06_cts':en06cat[key]['ml_cts'] if(key in en06cat) else None,

                       'en01_cts_err':en01cat[key]['ml_cts_err'] if(key in en01cat) else None,
                       'en02_cts_err':en02cat[key]['ml_cts_err'] if(key in en02cat) else None,
                       'en03_cts_err':en03cat[key]['ml_cts_err'] if(key in en03cat) else None,
                       'en06_cts_err':en06cat[key]['ml_cts_err'] if(key in en06cat) else None,

                       'en01_flux':en01cat[key]['ml_flux'] if(key in en01cat) else None,
                       'en02_flux':en02cat[key]['ml_flux'] if(key in en02cat) else None,
                       'en03_flux':en03cat[key]['ml_flux'] if(key in en03cat) else None,
                       'en06_flux':en06cat[key]['ml_flux'] if(key in en06cat) else None,

                       'en01_flux_err':en01cat[key]['ml_flux_err'] if(key in en01cat) else None,
                       'en02_flux_err':en02cat[key]['ml_flux_err'] if(key in en02cat) else None,
                       'en03_flux_err':en03cat[key]['ml_flux_err'] if(key in en03cat) else None,
                       'en06_flux_err':en06cat[key]['ml_flux_err'] if(key in en06cat) else None,

                       'en01_flux_lowerr':en01cat[key]['ml_flux_lowerr'] if(key in en01cat) else None,
                       'en02_flux_lowerr':en02cat[key]['ml_flux_lowerr'] if(key in en02cat) else None,
                       'en03_flux_lowerr':en03cat[key]['ml_flux_lowerr'] if(key in en03cat) else None,
                       'en06_flux_lowerr':en06cat[key]['ml_flux_lowerr'] if(key in en06cat) else None,

                       'en01_flux_uperr':en01cat[key]['ml_flux_uperr'] if(key in en01cat) else None,
                       'en02_flux_uperr':en02cat[key]['ml_flux_uperr'] if(key in en02cat) else None,
                       'en03_flux_uperr':en03cat[key]['ml_flux_uperr'] if(key in en03cat) else None,
                       'en06_flux_uperr':en06cat[key]['ml_flux_uperr'] if(key in en06cat) else None,

                       'en01_sens':en01cat[key]['sens'] if(key in en01cat) else None,
                       'en02_sens':en02cat[key]['sens'] if(key in en02cat) else None,
                       'en03_sens':en03cat[key]['sens'] if(key in en03cat) else None,
                       'en06_sens':en06cat[key]['sens'] if(key in en06cat) else None,

                       'en01_ape_cts':en01cat[key]['ape_cts'] if(key in en01cat) else None,
                       'en02_ape_cts':en02cat[key]['ape_cts'] if(key in en02cat) else None,
                       'en03_ape_cts':en03cat[key]['ape_cts'] if(key in en03cat) else None,
                       'en06_ape_cts':en06cat[key]['ape_cts'] if(key in en06cat) else None,

                       'en01_ape_exp':en01cat[key]['ape_exp'] if(key in en01cat) else None,
                       'en02_ape_exp':en02cat[key]['ape_exp'] if(key in en02cat) else None,
                       'en03_ape_exp':en03cat[key]['ape_exp'] if(key in en03cat) else None,
                       'en06_ape_exp':en06cat[key]['ape_exp'] if(key in en06cat) else None,

                       'en01_ape_bkg':en01cat[key]['ape_bkg'] if(key in en01cat) else None,
                       'en02_ape_bkg':en02cat[key]['ape_bkg'] if(key in en02cat) else None,
                       'en03_ape_bkg':en03cat[key]['ape_bkg'] if(key in en03cat) else None,
                       'en06_ape_bkg':en06cat[key]['ape_bkg'] if(key in en06cat) else None,

                       'en01_ape_radius':en01cat[key]['ape_radius'] if(key in en01cat) else None,
                       'en02_ape_radius':en02cat[key]['ape_radius'] if(key in en02cat) else None,
                       'en03_ape_radius':en03cat[key]['ape_radius'] if(key in en03cat) else None,
                       'en06_ape_radius':en06cat[key]['ape_radius'] if(key in en06cat) else None,

                       'en01_ape_pois':en01cat[key]['ape_pois'] if(key in en01cat) else None,
                       'en02_ape_pois':en02cat[key]['ape_pois'] if(key in en02cat) else None,
                       'en03_ape_pois':en03cat[key]['ape_pois'] if(key in en03cat) else None,
                       'en06_ape_pois':en06cat[key]['ape_pois'] if(key in en06cat) else None,
                       'forced_name':forced_name
                       },
                      )

        selected_count = selected_count + 1

    print("total={} selected={}".format(len(tbdata),selected_count))

    # Write the selected sources as CSV; column order is fixed here.
    with open(fout_selected, 'w') as csvfile:
        fieldnames = ['src_id', 'name', 'ra', 'dec', 'radec_err',
                      'det_like',
                      'ext','ext_err','ext_lowerr','ext_uperr','ext_like',
                      'ml_exp','ml_bkg',
                      'ml_cts','ml_cts_err','ml_cts_lowerr','ml_cts_uperr',
                      'ml_rate','ml_rate_err','ml_rate_lowerr','ml_rate_uperr',
                      'ml_flux','ml_flux_err','ml_flux_lowerr','ml_flux_uperr','forced_name',
                      'ape_cts','ape_exp','ape_bkg','ape_radius','ape_pois',

                      'en01_dl','en01_ml_rate','en01_ml_rate_err','en01_ml_rate_lowerr','en01_ml_rate_uperr','en01_cts','en01_cts_err','en01_ml_exp','en01_ml_bkg',
                      'en01_flux','en01_flux_err','en01_flux_lowerr','en01_flux_uperr','en01_sens','en01_ape_cts','en01_ape_exp','en01_ape_bkg','en01_ape_radius','en01_ape_pois',

                      'en02_dl','en02_ml_rate','en02_ml_rate_err','en02_ml_rate_lowerr','en02_ml_rate_uperr','en02_cts','en02_cts_err','en02_ml_exp','en02_ml_bkg',
                      'en02_flux','en02_flux_err','en02_flux_lowerr','en02_flux_uperr','en02_sens','en02_ape_cts','en02_ape_exp','en02_ape_bkg','en02_ape_radius','en02_ape_pois',

                      'en03_dl','en03_ml_rate','en03_ml_rate_err','en03_ml_rate_lowerr','en03_ml_rate_uperr','en03_cts','en03_cts_err','en03_ml_exp','en03_ml_bkg',
                      'en03_flux','en03_flux_err','en03_flux_lowerr','en03_flux_uperr','en03_sens','en03_ape_cts','en03_ape_exp','en03_ape_bkg','en03_ape_radius','en03_ape_pois',

                      'en06_dl','en06_ml_rate','en06_ml_rate_err','en06_ml_rate_lowerr','en06_ml_rate_uperr','en06_cts','en06_cts_err','en06_ml_exp','en06_ml_bkg',
                      'en06_flux','en06_flux_err','en06_flux_lowerr','en06_flux_uperr','en06_sens','en06_ape_cts','en06_ape_exp','en06_ape_bkg','en06_ape_radius','en06_ape_pois',]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for rec in catsel:
            writer.writerow(rec)

    # Persist the same selection as a pickle for final_euds_catalog.
    with open(rawcat, 'wb') as f:
        pickle.dump(catsel, f)
|
|
|
|
def final_euds_catalog(infile=None,outfile_fits=None,expcut=100):
|
|
with open(infile, 'rb') as f:
|
|
table = pickle.load(f)
|
|
|
|
print("Reading {} rows from {}".format(len(table),infile))
|
|
|
|
|
|
name = []
|
|
srcid = []
|
|
|
|
dr12name = []
|
|
|
|
ra = []
|
|
dec = []
|
|
radec_err = []
|
|
|
|
ext = []
|
|
ext_err = []
|
|
ext_like = []
|
|
|
|
det_like_0 = []
|
|
det_like_1 = []
|
|
det_like_2 = []
|
|
det_like_3 = []
|
|
det_like_4 = []
|
|
|
|
ml_rate_0 = []
|
|
ml_rate_1 = []
|
|
ml_rate_2 = []
|
|
ml_rate_3 = []
|
|
ml_rate_4 = []
|
|
|
|
ml_rate_err_0 = []
|
|
ml_rate_err_1 = []
|
|
ml_rate_err_2 = []
|
|
ml_rate_err_3 = []
|
|
ml_rate_err_4 = []
|
|
|
|
ml_rate_lowerr_0 = []
|
|
ml_rate_lowerr_1 = []
|
|
ml_rate_lowerr_2 = []
|
|
ml_rate_lowerr_3 = []
|
|
ml_rate_lowerr_4 = []
|
|
|
|
ml_rate_uperr_0 = []
|
|
ml_rate_uperr_1 = []
|
|
ml_rate_uperr_2 = []
|
|
ml_rate_uperr_3 = []
|
|
ml_rate_uperr_4 = []
|
|
|
|
|
|
ml_cts_0 = []
|
|
ml_cts_1 = []
|
|
ml_cts_2 = []
|
|
ml_cts_3 = []
|
|
ml_cts_4 = []
|
|
|
|
ml_cts_err_0 = []
|
|
ml_cts_err_1 = []
|
|
ml_cts_err_2 = []
|
|
ml_cts_err_3 = []
|
|
ml_cts_err_4 = []
|
|
|
|
ml_flux_0 = []
|
|
ml_flux_1 = []
|
|
ml_flux_2 = []
|
|
ml_flux_3 = []
|
|
ml_flux_4 = []
|
|
|
|
ml_flux_err_0 = []
|
|
ml_flux_err_1 = []
|
|
ml_flux_err_2 = []
|
|
ml_flux_err_3 = []
|
|
ml_flux_err_4 = []
|
|
|
|
ml_flux_lowerr_0 = []
|
|
ml_flux_lowerr_1 = []
|
|
ml_flux_lowerr_2 = []
|
|
ml_flux_lowerr_3 = []
|
|
ml_flux_lowerr_4 = []
|
|
|
|
ml_flux_uperr_0 = []
|
|
ml_flux_uperr_1 = []
|
|
ml_flux_uperr_2 = []
|
|
ml_flux_uperr_3 = []
|
|
ml_flux_uperr_4 = []
|
|
|
|
ml_exp_0 = []
|
|
ml_exp_1 = []
|
|
ml_exp_2 = []
|
|
ml_exp_3 = []
|
|
ml_exp_4 = []
|
|
|
|
ml_bkg_0 = []
|
|
ml_bkg_1 = []
|
|
ml_bkg_2 = []
|
|
ml_bkg_3 = []
|
|
ml_bkg_4 = []
|
|
|
|
ape_cts_0 = []
|
|
ape_cts_1 = []
|
|
ape_cts_2 = []
|
|
ape_cts_3 = []
|
|
ape_cts_4 = []
|
|
|
|
ape_exp_0 = []
|
|
ape_exp_1 = []
|
|
ape_exp_2 = []
|
|
ape_exp_3 = []
|
|
ape_exp_4 = []
|
|
|
|
ape_bkg_0 = []
|
|
ape_bkg_1 = []
|
|
ape_bkg_2 = []
|
|
ape_bkg_3 = []
|
|
ape_bkg_4 = []
|
|
|
|
ape_rad_0 = []
|
|
ape_rad_1 = []
|
|
ape_rad_2 = []
|
|
ape_rad_3 = []
|
|
ape_rad_4 = []
|
|
|
|
ape_pois_0 = []
|
|
ape_pois_1 = []
|
|
ape_pois_2 = []
|
|
ape_pois_3 = []
|
|
ape_pois_4 = []
|
|
|
|
count=1
|
|
for rec in table:
|
|
#print(rec['ape_exp'])
|
|
if (rec['ape_exp'] < expcut):
|
|
print("Skip due to {} < {}".format(rec['ape_exp'], expcut))
|
|
continue
|
|
name.append(rec['name'])
|
|
srcid.append(count)
|
|
|
|
count=count+1
|
|
ra.append(rec['ra'])
|
|
dec.append(rec['dec'])
|
|
radec_err.append(rec['radec_err'])
|
|
|
|
ext.append(rec['ext'])
|
|
ext_err.append(rec['ext_err'] if(rec['ext_err']>0.0) else None)
|
|
ext_like.append(rec['ext_like'])
|
|
dr12name.append(rec['forced_name'])
|
|
|
|
|
|
det_like_0.append(rec['det_like'])
|
|
det_like_1.append(rec['en01_dl'])
|
|
det_like_2.append(rec['en02_dl'])
|
|
det_like_3.append(rec['en03_dl'])
|
|
det_like_4.append(rec['en06_dl'])
|
|
|
|
ml_rate_0.append(rec['ml_rate'])
|
|
ml_rate_1.append(rec['en01_ml_rate'])
|
|
ml_rate_2.append(rec['en02_ml_rate'])
|
|
ml_rate_3.append(rec['en03_ml_rate'])
|
|
ml_rate_4.append(rec['en06_ml_rate'])
|
|
|
|
ml_rate_err_0.append(rec['ml_rate_err'])
|
|
ml_rate_err_1.append(rec['en01_ml_rate_err'])
|
|
ml_rate_err_2.append(rec['en02_ml_rate_err'])
|
|
ml_rate_err_3.append(rec['en03_ml_rate_err'])
|
|
ml_rate_err_4.append(rec['en06_ml_rate_err'])
|
|
|
|
#ml_rate_lowerr_0.append(rec['ml_rate_lowerr'])
|
|
#ml_rate_lowerr_1.append(rec['en01_ml_rate_lowerr'])
|
|
#ml_rate_lowerr_2.append(rec['en02_ml_rate_lowerr'])
|
|
#ml_rate_lowerr_3.append(rec['en03_ml_rate_lowerr'])
|
|
#ml_rate_lowerr_4.append(rec['en06_ml_rate_lowerr'])
|
|
|
|
#ml_rate_uperr_0.append(rec['ml_rate_uperr'])
|
|
#ml_rate_uperr_1.append(rec['en01_ml_rate_uperr'])
|
|
#ml_rate_uperr_2.append(rec['en02_ml_rate_uperr'])
|
|
#ml_rate_uperr_3.append(rec['en03_ml_rate_uperr'])
|
|
#ml_rate_uperr_4.append(rec['en06_ml_rate_uperr'])
|
|
|
|
|
|
ml_cts_0.append(rec['ml_cts'])
|
|
ml_cts_1.append(rec['en01_cts'])
|
|
ml_cts_2.append(rec['en02_cts'])
|
|
ml_cts_3.append(rec['en03_cts'])
|
|
ml_cts_4.append(rec['en06_cts'])
|
|
|
|
ml_cts_err_0.append(rec['ml_cts_err'])
|
|
ml_cts_err_1.append(rec['en01_cts_err'])
|
|
ml_cts_err_2.append(rec['en02_cts_err'])
|
|
ml_cts_err_3.append(rec['en03_cts_err'])
|
|
ml_cts_err_4.append(rec['en06_cts_err'])
|
|
|
|
|
|
ml_flux_0.append(rec['ml_flux'])
|
|
ml_flux_1.append(rec['en01_flux'])
|
|
ml_flux_2.append(rec['en02_flux'])
|
|
ml_flux_3.append(rec['en03_flux'])
|
|
ml_flux_4.append(rec['en06_flux'])
|
|
|
|
ml_flux_err_0.append(rec['ml_flux_err'])
|
|
ml_flux_err_1.append(rec['en01_flux_err'])
|
|
ml_flux_err_2.append(rec['en02_flux_err'])
|
|
ml_flux_err_3.append(rec['en03_flux_err'])
|
|
ml_flux_err_4.append(rec['en06_flux_err'])
|
|
|
|
|
|
ml_exp_0.append(rec['ml_exp'])
|
|
ml_exp_1.append(rec['en01_ml_exp'])
|
|
ml_exp_2.append(rec['en02_ml_exp'])
|
|
ml_exp_3.append(rec['en03_ml_exp'])
|
|
ml_exp_4.append(rec['en06_ml_exp'])
|
|
|
|
ml_bkg_0.append(rec['ml_bkg'])
|
|
ml_bkg_1.append(rec['en01_ml_bkg'])
|
|
ml_bkg_2.append(rec['en02_ml_bkg'])
|
|
ml_bkg_3.append(rec['en03_ml_bkg'])
|
|
ml_bkg_4.append(rec['en06_ml_bkg'])
|
|
|
|
ape_cts_0.append(rec['ape_cts'])
|
|
ape_cts_1.append(rec['en01_ape_cts'])
|
|
ape_cts_2.append(rec['en02_ape_cts'])
|
|
ape_cts_3.append(rec['en03_ape_cts'])
|
|
ape_cts_4.append(rec['en06_ape_cts'])
|
|
|
|
ape_exp_0.append(rec['ape_exp'])
|
|
ape_exp_1.append(rec['en01_ape_exp'])
|
|
ape_exp_2.append(rec['en02_ape_exp'])
|
|
ape_exp_3.append(rec['en03_ape_exp'])
|
|
ape_exp_4.append(rec['en06_ape_exp'])
|
|
|
|
ape_bkg_0.append(rec['ape_bkg'])
|
|
ape_bkg_1.append(rec['en01_ape_bkg'])
|
|
ape_bkg_2.append(rec['en02_ape_bkg'])
|
|
ape_bkg_3.append(rec['en03_ape_bkg'])
|
|
ape_bkg_4.append(rec['en06_ape_bkg'])
|
|
|
|
ape_rad_0.append(rec['ape_radius'])
|
|
ape_rad_1.append(rec['en01_ape_radius'])
|
|
ape_rad_2.append(rec['en02_ape_radius'])
|
|
ape_rad_3.append(rec['en03_ape_radius'])
|
|
ape_rad_4.append(rec['en06_ape_radius'])
|
|
|
|
ape_pois_0.append(rec['ape_pois'] if(rec['ape_pois']>0.0) else None)
|
|
ape_pois_1.append(rec['en01_ape_pois'] if(rec['en01_ape_pois']>0.0) else None)
|
|
ape_pois_2.append(rec['en02_ape_pois'] if(rec['en02_ape_pois']>0.0) else None)
|
|
ape_pois_3.append(rec['en03_ape_pois'] if(rec['en03_ape_pois']>0.0) else None)
|
|
ape_pois_4.append(rec['en06_ape_pois'] if(rec['en06_ape_pois']>0.0) else None)
|
|
|
|
print("Ready to write {} rows".format(count))
|
|
|
|
""" Sort output catalogue by RA """
|
|
indices = sorted(
|
|
range(len(ra)),
|
|
key=lambda index: ra[index]
|
|
)
|
|
|
|
coldefs = fits.ColDefs([
|
|
fits.Column(name='ID_SRC', format='I', array=srcid), # don't sort srcid, apply it as is
|
|
fits.Column(name='NAME', format='21A', array=[name[index] for index in indices]),
|
|
fits.Column(name='RA', format='D', unit='deg', array=[ra[index] for index in indices]),
|
|
fits.Column(name='DEC', format='D', unit='deg', array=[dec[index] for index in indices]),
|
|
fits.Column(name='RADEC_ERR', format='E', unit='arcsec', array=[radec_err[index] for index in indices]),
|
|
fits.Column(name='EXT', format='E', unit='arcsec', array=[ext[index] for index in indices]),
|
|
fits.Column(name='EXT_ERR', format='E', unit='arcsec', array=[ext_err[index] for index in indices]),
|
|
fits.Column(name='EXT_LIKE', format='E', unit='', array=[ext_like[index] for index in indices]),
|
|
fits.Column(name='DET_LIKE', format='E', unit='', array=[det_like_0[index] for index in indices]),
|
|
fits.Column(name='ML_RATE', format='E', unit='cts/s', array=[ml_rate_0[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_ERR', format='E', unit='cts/s', array=[ml_rate_err_0[index] for index in indices]),
|
|
fits.Column(name='ML_CTS', format='E', unit='cts', array=[ml_cts_0[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_ERR', format='E', unit='cts', array=[ml_cts_err_0[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX', format='E', unit='erg/cm**2/s', array=[ml_flux_0[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_ERR', format='E', unit='erg/cm**2/s', array=[ml_flux_err_0[index] for index in indices]),
|
|
fits.Column(name='ML_EXP', format='E', unit='s', array=[ml_exp_0[index] for index in indices]),
|
|
fits.Column(name='ML_BKG', format='E', unit='cts/arcmin**2', array=[ml_bkg_0[index] for index in indices]),
|
|
fits.Column(name='DR12_IAU_NAME', format='21A' , array=[dr12name[index] for index in indices]),
|
|
|
|
|
|
fits.Column(name='DET_LIKE_1', format='E', unit='', array=[det_like_1[index] for index in indices]),
|
|
fits.Column(name='DET_LIKE_2', format='E', unit='', array=[det_like_2[index] for index in indices]),
|
|
fits.Column(name='DET_LIKE_3', format='E', unit='', array=[det_like_3[index] for index in indices]),
|
|
fits.Column(name='DET_LIKE_4', format='E', unit='', array=[det_like_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_RATE_1', format='E', unit='cts/s', array=[ml_rate_1[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_2', format='E', unit='cts/s', array=[ml_rate_2[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_3', format='E', unit='cts/s', array=[ml_rate_3[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_4', format='E', unit='cts/s', array=[ml_rate_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_RATE_ERR_1', format='E', unit='cts/s', array=[ml_rate_err_1[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_ERR_2', format='E', unit='cts/s', array=[ml_rate_err_2[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_ERR_3', format='E', unit='cts/s', array=[ml_rate_err_3[index] for index in indices]),
|
|
fits.Column(name='ML_RATE_ERR_4', format='E', unit='cts/s', array=[ml_rate_err_4[index] for index in indices]),
|
|
|
|
#fits.Column(name='ML_RATE_LOWERR', format='E', unit='cts/s', array=[ml_rate_lowerr_0[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_LOWERR_1', format='E', unit='cts/s', array=[ml_rate_lowerr_1[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_LOWERR_2', format='E', unit='cts/s', array=[ml_rate_lowerr_2[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_LOWERR_3', format='E', unit='cts/s', array=[ml_rate_lowerr_3[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_LOWERR_4', format='E', unit='cts/s', array=[ml_rate_lowerr_4[index] for index in indices]),
|
|
|
|
#fits.Column(name='ML_RATE_UPERR', format='E', unit='cts/s', array=[ml_rate_uperr_0[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_UPERR_1', format='E', unit='cts/s', array=[ml_rate_uperr_1[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_UPERR_2', format='E', unit='cts/s', array=[ml_rate_uperr_2[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_UPERR_3', format='E', unit='cts/s', array=[ml_rate_uperr_3[index] for index in indices]),
|
|
#fits.Column(name='ML_RATE_UPERR_4', format='E', unit='cts/s', array=[ml_rate_uperr_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_CTS_1', format='E', unit='cts', array=[ml_cts_1[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_2', format='E', unit='cts', array=[ml_cts_2[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_3', format='E', unit='cts', array=[ml_cts_3[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_4', format='E', unit='cts', array=[ml_cts_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_CTS_ERR_1', format='E', unit='cts', array=[ml_cts_err_1[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_ERR_2', format='E', unit='cts', array=[ml_cts_err_2[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_ERR_3', format='E', unit='cts', array=[ml_cts_err_3[index] for index in indices]),
|
|
fits.Column(name='ML_CTS_ERR_4', format='E', unit='cts', array=[ml_cts_err_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_FLUX_1', format='E', unit='erg/cm**2/s', array=[ml_flux_1[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_2', format='E', unit='erg/cm**2/s', array=[ml_flux_2[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_3', format='E', unit='erg/cm**2/s', array=[ml_flux_3[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_4', format='E', unit='erg/cm**2/s', array=[ml_flux_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_FLUX_ERR_1', format='E', unit='erg/cm**2/s', array=[ml_flux_err_1[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_ERR_2', format='E', unit='erg/cm**2/s', array=[ml_flux_err_2[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_ERR_3', format='E', unit='erg/cm**2/s', array=[ml_flux_err_3[index] for index in indices]),
|
|
fits.Column(name='ML_FLUX_ERR_4', format='E', unit='erg/cm**2/s', array=[ml_flux_err_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_EXP_1', format='E', unit='s', array=[ml_exp_1[index] for index in indices]),
|
|
fits.Column(name='ML_EXP_2', format='E', unit='s', array=[ml_exp_2[index] for index in indices]),
|
|
fits.Column(name='ML_EXP_3', format='E', unit='s', array=[ml_exp_3[index] for index in indices]),
|
|
fits.Column(name='ML_EXP_4', format='E', unit='s', array=[ml_exp_4[index] for index in indices]),
|
|
|
|
fits.Column(name='ML_BKG_1', format='E', unit='cts/arcmin**2', array=[ml_bkg_1[index] for index in indices]),
|
|
fits.Column(name='ML_BKG_2', format='E', unit='cts/arcmin**2', array=[ml_bkg_2[index] for index in indices]),
|
|
fits.Column(name='ML_BKG_3', format='E', unit='cts/arcmin**2', array=[ml_bkg_3[index] for index in indices]),
|
|
fits.Column(name='ML_BKG_4', format='E', unit='cts/arcmin**2', array=[ml_bkg_4[index] for index in indices]),
|
|
|
|
fits.Column(name='APE_CTS_0', format='I', unit='count', array=[ape_cts_0[index] for index in indices]),
|
|
fits.Column(name='APE_CTS_1', format='I', unit='count', array=[ape_cts_1[index] for index in indices]),
|
|
fits.Column(name='APE_CTS_2', format='I', unit='count', array=[ape_cts_2[index] for index in indices]),
|
|
fits.Column(name='APE_CTS_3', format='I', unit='count', array=[ape_cts_3[index] for index in indices]),
|
|
fits.Column(name='APE_CTS_4', format='I', unit='count', array=[ape_cts_4[index] for index in indices]),
|
|
|
|
fits.Column(name='APE_EXP_0', format='E', unit='s', array=[ape_exp_0[index] for index in indices]),
|
|
fits.Column(name='APE_EXP_1', format='E', unit='s', array=[ape_exp_1[index] for index in indices]),
|
|
fits.Column(name='APE_EXP_2', format='E', unit='s', array=[ape_exp_2[index] for index in indices]),
|
|
fits.Column(name='APE_EXP_3', format='E', unit='s', array=[ape_exp_3[index] for index in indices]),
|
|
fits.Column(name='APE_EXP_4', format='E', unit='s', array=[ape_exp_4[index] for index in indices]),
|
|
|
|
fits.Column(name='APE_BKG_0', format='E', unit='count', array=[ape_bkg_0[index] for index in indices]),
|
|
fits.Column(name='APE_BKG_1', format='E', unit='count', array=[ape_bkg_1[index] for index in indices]),
|
|
fits.Column(name='APE_BKG_2', format='E', unit='count', array=[ape_bkg_2[index] for index in indices]),
|
|
fits.Column(name='APE_BKG_3', format='E', unit='count', array=[ape_bkg_3[index] for index in indices]),
|
|
fits.Column(name='APE_BKG_4', format='E', unit='count', array=[ape_bkg_4[index] for index in indices]),
|
|
|
|
fits.Column(name='APE_RADIUS_0', format='E', unit='pixel', array=[ape_rad_0[index] for index in indices]),
|
|
fits.Column(name='APE_RADIUS_1', format='E', unit='pixel', array=[ape_rad_1[index] for index in indices]),
|
|
fits.Column(name='APE_RADIUS_2', format='E', unit='pixel', array=[ape_rad_2[index] for index in indices]),
|
|
fits.Column(name='APE_RADIUS_3', format='E', unit='pixel', array=[ape_rad_3[index] for index in indices]),
|
|
fits.Column(name='APE_RADIUS_4', format='E', unit='pixel', array=[ape_rad_4[index] for index in indices]),
|
|
|
|
fits.Column(name='APE_POIS_0', format='E', unit='', array=[ape_pois_0[index] for index in indices]),
|
|
fits.Column(name='APE_POIS_1', format='E', unit='', array=[ape_pois_1[index] for index in indices]),
|
|
fits.Column(name='APE_POIS_2', format='E', unit='', array=[ape_pois_2[index] for index in indices]),
|
|
fits.Column(name='APE_POIS_3', format='E', unit='', array=[ape_pois_3[index] for index in indices]),
|
|
fits.Column(name='APE_POIS_4', format='E', unit='', array=[ape_pois_4[index] for index in indices]),
|
|
])
|
|
|
|
hdu = fits.BinTableHDU.from_columns(coldefs, name='CATALOG')
|
|
hdu.header['MISSION'] = ('SRG', 'SPECTRUM-RG')
|
|
hdu.header['TELESCOP'] = ('eROSITA', '')
|
|
hdu.header['INSTITUT'] = ('IKI', 'Affiliation')
|
|
hdu.header['AUTHOR'] = ('Roman Krivonos', 'Responsible person')
|
|
hdu.header['EMAIL'] = ('krivonos@cosmos.ru', 'E-mail')
|
|
#hdu.add_checksum()
|
|
print(hdu.columns)
|
|
hdu.writeto(outfile_fits, overwrite=True)
|
|
|
|
with fits.open(outfile_fits, mode='update') as hdus:
|
|
hdus[1].add_checksum()
|
|
|
|
|
|
|
|
|
|
def get_log_distr(infile=None, index_col=None, field=None, minval=10, maxval=100, nbin=50):
    """Load one column of a CSV table and prepare logarithmic histogram bins.

    Parameters
    ----------
    infile : str
        Path of the CSV file to read.
    index_col : str or int, optional
        Column passed to ``pandas.read_csv`` as the index.
    field : str
        Name of the column to extract.
    minval, maxval : float
        Range covered by the logarithmic binning (both must be positive).
    nbin : int
        Number of log-spaced bin *edges* generated between minval and maxval.

    Returns
    -------
    tuple
        ``(data, logbins, mean, median)`` -- the selected pandas column,
        the log-spaced edges, and the numpy mean/median of the column.
    """
    frame = pandas.read_csv(infile, index_col=index_col)
    column = frame[field]
    # nbin log-spaced edges spanning [minval, maxval]
    edges = np.logspace(np.log10(minval), np.log10(maxval), nbin)
    return column, edges, np.mean(column), np.median(column)
|
|
|
|
|
|
|
|
|
|
def make_source_name(ra, dec, key='SRGe'):
    """ Makes source name in format JHHMMSS.s+/-DDMMSS based on input RA and Dec.

    Parameters
    ----------
    ra, dec : float
        Source position in degrees (interpreted in the FK5 frame).
    key : str
        Catalog prefix placed before the 'J...' designation.

    Returns
    -------
    str
        The formatted name, or the string 'None' when the coordinates
        cannot be converted.
    """
    try:
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        name = '{0} J{1}{2}'.format(
            key,
            crd.ra.to_string(unit=u.hourangle, sep='', precision=1, pad=True),
            crd.dec.to_string(sep='', precision=0, alwayssign=True, pad=True))
        return(name)
    except Exception as e:
        # BUG FIX: was 'except BaseException', which also swallowed
        # KeyboardInterrupt/SystemExit; catch ordinary errors only.
        print('Failed: '+ str(e))
        return('None')
|
|
|
|
|
|
def add_zero_column(infile, colname, ext=1):
    """Append an all-zero column *colname* to FITS table extension *ext* of *infile*.

    The file is rewritten in place. Nothing (beyond printing a notice)
    happens when the column already exists.
    """
    # Collect the column list and row count while the file is open.
    # BUG FIX: the old code leaked the handle on the early return and
    # read the header after hdul.close(); a context manager fixes both.
    with fits.open(infile) as hdul:
        for col in hdul[ext].columns:
            if col.name == colname:
                print("Column {} already exists in {}".format(colname, infile))
                return
        naxis2 = hdul[ext].header['NAXIS2']

    colval = np.zeros((naxis2))

    table = Table.read(infile, format='fits')
    table.add_column(Column(name=colname, data=colval))
    # BUG FIX: overwrite expects a boolean, not the (truthy) string 'True'
    table.write(infile, format='fits', overwrite=True)
|
|
|
|
def add_specific_columns(infile):
    """ Read forced catalog and the corresponding sensitivity map.

    Adds the zero-filled ML_RATE_LOWERR/UPERR and ML_FLUX_LOWERR/UPERR
    columns that a later ermldet run expects to find in the catalog.
    """
    if(os.path.isfile(infile)==False):
        print("File not found {}".format(infile))
        # BUG FIX: previously fell through and tried to modify a missing file
        return

    add_zero_column(infile, 'ML_RATE_LOWERR')
    add_zero_column(infile, 'ML_RATE_UPERR')
    add_zero_column(infile, 'ML_FLUX_LOWERR')
    add_zero_column(infile, 'ML_FLUX_UPERR')
|
|
|
|
"""
|
|
ermldet: eSASS4EDR фев 11 11:41:20 2022
|
|
ermldet/FGET_BOXLIST: **ERROR2** column SCTS not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column SCTS_ERR not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column BOX_CTS not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column LIKE not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column BG_MAP not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column BG_RAW not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column EXP_MAP not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column FLUX not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column FLUX_ERR not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column RATE not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column RATE_ERR not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column HR2_ER not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column VIGNETTING not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column BOX_SIZE not found
|
|
ermldet/FGET_BOXLIST: **ERROR2** column EEF not found
|
|
|
|
ermldet: v1.56/2.18 esass_200412 Jul 2 12:04:46 2022
|
|
ermldet/read_tHeader: **WARNING0** keyword not found in header- keyword CHOPDEAD
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_CTS_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_CTS_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column X_IMA_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column X_IMA_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column Y_IMA_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column Y_IMA_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column EXT_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column EXT_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_FLUX_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_FLUX_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_RATE_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column ML_RATE_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column RA_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column RA_UPERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column DEC_LOWERR not found
|
|
ermldet/FGET_MLLIST: **ERROR2** column DEC_UPERR not found
|
|
"""
|
|
def make_final_ds9reg(infile=None, outreg=None, scale=3600):
    """Convert a pickled source list into a ds9 region file.

    Parameters
    ----------
    infile : str
        Pickle holding a list of source dicts with keys 'ra', 'dec',
        'radec_err', 'name' and (possibly None) 'forced_name'.
    outreg : str
        Output ds9 region file: fk5 circles of radius radec_err/scale.
    scale : float
        Divisor converting 'radec_err' to degrees (3600 for arcsec input).
    """
    with open(infile, 'rb') as f:
        data = pickle.load(f)

    print()
    with open(outreg, 'w') as writer:
        for rec in data:
            # idiom fix: compare against None with 'is not', not '!='
            if rec['forced_name'] is not None:
                notes = rec['forced_name']
            else:
                notes = ''

            writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {}}}\n".format(
                rec['ra'], rec['dec'],
                rec['radec_err']/scale,
                rec['name'], notes))
|
|
|
|
|
|
|
|
def make_extended(infile=None, elmin=5.0, outreg=None, scale=3600):
    """Select extended forced sources and write them to a ds9 region file.

    For each pickled source carrying a 'forced_name', sources with
    EXT_LIKE above *elmin* are printed as a LaTeX table row and written
    as fk5 circles of radius ext/scale degrees.

    Parameters
    ----------
    infile : str
        Pickle holding a list of source dicts.
    elmin : float
        Minimum extension likelihood for a source to be reported.
    outreg : str
        Output ds9 region file.
    scale : float
        Divisor converting 'ext' (arcsec) to degrees.
    """
    with open(infile, 'rb') as f:
        data = pickle.load(f)

    print()
    with open(outreg, 'w') as writer:
        for rec in data:
            # idiom fix: compare against None with 'is not', not '!='
            if rec['forced_name'] is not None:
                #text = json.dumps(rec, sort_keys=True, indent=4)
                print("Forced: ", rec['forced_name'])
                ext_like = rec['ext_like']
                if ext_like > elmin:
                    # '\\pm' fixes the invalid '\p' escape warning; the
                    # emitted text is byte-identical to before.
                    print("{:.4f} {} {} & {:.4f} & {:.4f} & {:.2f} & {:.2f} & {:.2f} $\\pm$ {:.2f} \\\\".format(
                        rec['ext_like'], rec['src_id'],
                        rec['name'], rec['ra'], rec['dec'],
                        rec['det_like'], rec['ext_like'],
                        rec['ext'], rec['ext_err']))

                    writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {:.2f}}}\n".format(
                        rec['ra'], rec['dec'],
                        rec['ext']/scale,
                        rec['src_id'],
                        rec['ext_like']))
|
|
def fix_catalog(mllist=None, refimage=None, srcs_remove=None, srcs_add=None):
    """
    Removes a selection of sources from an ermldet catalog and appends
    hand-placed replacement sources, writing a '<name>.fixed.fits' copy.

    mllist -- catalog of sources after ermldet
    refimage -- FITS image, used to extract WCS information for X_IMA,Y_IMA calculation
    srcs_remove -- list of ID_SRC values to drop from the catalog
    srcs_add -- dict {label: [ra, dec, radec_err]}; each entry is mutated
                in place (the assigned ID_SRC is appended to the list)

    Notes -- ermldet translates X_IMA_ERR and Y_IMA_ERR into the final
    RADEC_ERR=3600*SQRT((X_IMA_ERR*CDELT)^2 + (Y_IMA_ERR*CDELT)^2)

    NOTE(review): maxid/maxcl are defined only inside the srcs_remove
    branch; calling with empty srcs_remove but non-empty srcs_add would
    raise NameError -- confirm intended usage always removes first.
    NOTE(review): len(srcs_remove) raises TypeError for the default None;
    callers apparently always pass lists -- verify.
    """
    # Work on a '<name>.fixed.fits' copy; the input catalog stays untouched.
    outfile=mllist.replace(".fits", ".fixed.fits")
    print("Fix catalog {} --> {}".format(mllist,outfile))
    remove_file(outfile)
    shutil.copyfile(mllist, outfile)

    """ remove selected false extended sources """

    if(len(srcs_remove)>0):
        indexes=[]
        table = Table.read(outfile, format='fits')
        maxid=1
        maxcl=1
        print()
        for index, row in enumerate(table):
            if row['ID_SRC'] in srcs_remove:
                print("Remove source ID_SRC={}".format(row['ID_SRC']))
                indexes.append(index)
            # track the largest existing IDs so appended sources get fresh ones
            if(row['ID_SRC']>maxid):
                maxid=row['ID_SRC']
            if(row['ID_CLUSTER']>maxcl):
                maxcl=row['ID_CLUSTER']
        print()
        table.remove_rows(indexes)
        # NOTE(review): overwrite='True' is a truthy string, not a bool; it
        # works but overwrite=True is what astropy expects.
        table.write(outfile, format='fits', overwrite='True')

    if(len(srcs_add)>0):
        # WCS of the reference image converts the new world coordinates
        # into the X_IMA/Y_IMA image coordinates ermldet works with.
        hdulist = fits.open(refimage)
        w = WCS(hdulist[0].header)
        cdelt=abs(w.wcs.cdelt[0])

        """ add sources """
        table = Table.read(outfile, format='fits')
        dt = table.copy()

        """ test two methods of conversion world --> pix """
        """
        for d in dt:
            src_crd = SkyCoord(d['RA'], d['DEC'], frame=FK5(), unit="deg")
            x_ima, y_ima = w.world_to_pixel(src_crd)
            pix = w.wcs_world2pix([(d['RA'], d['DEC']),], 1)
            print(x_ima,pix[0][0],d['X_IMA'],y_ima,pix[0][1],d['Y_IMA'])
        sys.exit()
        """

        # Use the first catalog row as a template for every injected source;
        # BOX_ID_SRC=9999 marks the rows as hand-added.
        row0 = dt[0]
        row0['BOX_ID_SRC']=9999
        print()
        for key in srcs_add.keys():
            maxid=maxid+1
            maxcl=maxcl+1

            print("Add source {} --> ID_SRC={}".format(key,maxid))
            # record the assigned ID_SRC back into the caller's dict
            srcs_add[key].append(maxid)
            ra=srcs_add[key][0]
            dec=srcs_add[key][1]
            radec_err=srcs_add[key][2]
            pix = w.wcs_world2pix([(ra, dec),], 1)
            x_ima=pix[0][0]
            y_ima=pix[0][1]

            row0['ID_SRC']=maxid
            row0['ID_CLUSTER']=maxcl
            row0['RA']=ra
            row0['DEC']=dec
            row0['RA_LOWERR']=radec_err
            row0['RA_UPERR']=radec_err
            row0['DEC_LOWERR']=radec_err
            row0['DEC_UPERR']=radec_err
            row0['RADEC_ERR']=radec_err # doesn't work. This field will be overwritten by ermldet (see Notes above).
            row0['X_IMA']=x_ima
            # split radec_err evenly between the two image axes so that
            # ermldet's quadrature sum reproduces it (see Notes above)
            row0['X_IMA_ERR']=(radec_err/3600)/cdelt/np.sqrt(2)
            row0['Y_IMA']=y_ima
            row0['Y_IMA_ERR']=(radec_err/3600)/cdelt/np.sqrt(2)
            row0['DET_LIKE']=0.0
            row0['EXT']=0.0
            row0['EXT_LIKE']=0.0

            # three rows per source: one per instrument/band combination
            row0['ID_INST']=0
            row0['ID_BAND']=0
            table.add_row(row0)

            row0['ID_INST']=1
            row0['ID_BAND']=0
            table.add_row(row0)

            row0['ID_INST']=1
            row0['ID_BAND']=1
            table.add_row(row0)

        print()
        print(srcs_add)
        table.write(outfile, format='fits', overwrite='True')
|
|
|
|
|
|
def fix_xmm_sources(mllist=None, refimage=None, xmm_catalog=None):
    """
    Makes catalog for XMM forced photometry: empties a copy of the ermldet
    catalog and refills it with one entry (three instrument/band rows) per
    4XMM DR12 source, then pickles the {IAUNAME: [ra, dec, radec_err,
    id_src, dr12srcid]} mapping next to the output catalog.

    mllist -- catalog of sources after ermldet
    refimage -- FITS image, used to extract WCS information for X_IMA,Y_IMA calculation
    xmm_catalog -- 4XMM DR12 FITS catalog (IAUNAME, SRCID, SC_RA, SC_DEC, SC_POSERR)

    Notes -- ermldet translates X_IMA_ERR and Y_IMA_ERR into the final
    RADEC_ERR=3600*SQRT((X_IMA_ERR*CDELT)^2 + (Y_IMA_ERR*CDELT)^2)

    """
    # Work on a '<name>.fixed-xmm.fits' copy; the input catalog stays untouched.
    outfile=mllist.replace(".fits", ".fixed-xmm.fits")
    print("Fix catalog {} --> {}".format(mllist,outfile))
    remove_file(outfile)
    shutil.copyfile(mllist, outfile)

    """ remove selected false extended sources """

    table = Table.read(outfile, format='fits')
    dt = table.copy()
    # first catalog row kept as a template before the table is emptied;
    # BOX_ID_SRC=9999 marks the rows as hand-added
    row0 = dt[0]
    row0['BOX_ID_SRC']=9999

    indexes=[]
    print()
    # drop every existing row: the output catalog holds XMM sources only
    for index, row in enumerate(table):
        indexes.append(index)
    table.remove_rows(indexes)
    # NOTE(review): overwrite='True' is a truthy string, not a bool; it
    # works but overwrite=True is what astropy expects.
    table.write(outfile, format='fits', overwrite='True')

    # WCS of the reference image converts world coordinates into the
    # X_IMA/Y_IMA image coordinates ermldet works with
    hdulist = fits.open(refimage)
    w = WCS(hdulist[0].header)
    cdelt=abs(w.wcs.cdelt[0])

    """ add sources """
    table = Table.read(outfile, format='fits')
    dt = table.copy()

    xmm_table = Table.read(xmm_catalog, format='fits')
    srcs_xmm={}
    print()
    maxid=0
    maxcl=0
    for index, row in enumerate(xmm_table):
        key=row['IAUNAME']
        maxid=maxid+1
        maxcl=maxcl+1
        dr12srcid = row['SRCID']
        print("Add source {} --> ID_SRC={}, DR12={}".format(key,maxid,dr12srcid))

        ra=row['SC_RA']
        dec=row['SC_DEC']
        radec_err=row['SC_POSERR']
        srcs_xmm[key]=[ra, dec, radec_err, maxid, dr12srcid]
        """ Unfortunatelly eSASS does not support 64-bit integers, so we cannot use DR12SRCID as ID_SRC in eSASS """

        pix = w.wcs_world2pix([(ra, dec),], 1)
        x_ima=pix[0][0]
        y_ima=pix[0][1]

        row0['ID_SRC']=maxid
        row0['ID_CLUSTER']=maxcl
        row0['RA']=ra
        row0['DEC']=dec
        row0['RA_LOWERR']=radec_err
        row0['RA_UPERR']=radec_err
        row0['DEC_LOWERR']=radec_err
        row0['DEC_UPERR']=radec_err
        row0['RADEC_ERR']=radec_err # doesn't work. This field will be overwritten by ermldet (see Notes above).
        row0['X_IMA']=x_ima
        # split radec_err evenly between the two image axes so that
        # ermldet's quadrature sum reproduces it (see Notes above)
        row0['X_IMA_ERR']=(radec_err/3600)/cdelt/np.sqrt(2)
        row0['Y_IMA']=y_ima
        row0['Y_IMA_ERR']=(radec_err/3600)/cdelt/np.sqrt(2)
        row0['DET_LIKE']=0.0
        row0['EXT']=0.0
        row0['EXT_LIKE']=0.0

        # three rows per source: one per instrument/band combination
        row0['ID_INST']=0
        row0['ID_BAND']=0
        table.add_row(row0)

        row0['ID_INST']=1
        row0['ID_BAND']=0
        table.add_row(row0)

        row0['ID_INST']=1
        row0['ID_BAND']=1
        table.add_row(row0)

    print()

    table.write(outfile, format='fits', overwrite='True')

    # keep the name/ID mapping for make_xmm_catalog()
    with open(mllist.replace(".fits", ".fixed-xmm.pickle"), 'wb') as f:
        pickle.dump(srcs_xmm, f)
|
|
|
|
|
|
def make_xmm_catalog(infile_en00cat=None,
                     infile_en01cat=None,
                     infile_en02cat=None,
                     infile_en03cat=None,
                     infile_en06cat=None,
                     forced_xmm_sources=None,
                     outfile=None, expcut=0):
    """Merge per-band forced catalogs into one pickled XMM source list.

    Parameters
    ----------
    infile_en00cat .. infile_en06cat : str
        Forced-photometry catalogs for the total band (en00) and the four
        sub-bands (en01, en02, en03, en06), read with read_forced_catalog().
    forced_xmm_sources : str
        Pickle with the {4XMM name: [ra, dec, radec_err, id_src, dr12srcid]}
        mapping produced by fix_xmm_sources().
    outfile : str
        Output pickle: a list of per-source dicts.
    expcut : float
        Minimum en00 aperture exposure; sources not above it are skipped.
    """

    """ read forced catalogs and corresponding sensitivity maps """
    en00cat = read_forced_catalog(infile_en00cat)
    bandcats = {'en01': read_forced_catalog(infile_en01cat),
                'en02': read_forced_catalog(infile_en02cat),
                'en03': read_forced_catalog(infile_en03cat),
                'en06': read_forced_catalog(infile_en06cat)}

    with open(forced_xmm_sources, 'rb') as f:
        srcs_forced = pickle.load(f)

    # output-key suffix --> field name in the per-band forced catalogs;
    # replaces the former copy-pasted 60-entry dict literal
    band_fields = {'dl': 'det_like',
                   'ml_rate': 'ml_rate',
                   'ml_rate_err': 'ml_rate_err',
                   'ml_cts': 'ml_cts',
                   'ml_cts_err': 'ml_cts_err',
                   'flux': 'ml_flux',
                   'flux_err': 'ml_flux_err',
                   'flux_lowerr': 'ml_flux_lowerr',
                   'flux_uperr': 'ml_flux_uperr',
                   'ml_exp': 'ml_exp',
                   'ml_bkg': 'ml_bkg',
                   'ape_cts': 'ape_cts',
                   'ape_exp': 'ape_exp',
                   'ape_bkg': 'ape_bkg',
                   'ape_radius': 'ape_radius',
                   'ape_pois': 'ape_pois'}

    catsel = []
    selected_count = 0
    for key in en00cat.keys():
        """ Go through forced en0 catalog """
        src = en00cat[key]
        # keep the 'not (x > expcut)' form: it also rejects NaN exposures
        if not (src['ape_exp'] > expcut):
            continue

        ikey = int(key.replace('ML', ''))
        # (dead SkyCoord construction removed here: its result was never used)

        """ find the corresponding 4XMM name (last match wins, as before) """
        forced_name = None
        forced_srcid = None
        for sfkey in srcs_forced.keys():
            if srcs_forced[sfkey][3] == ikey:
                forced_name = sfkey
                forced_srcid = srcs_forced[sfkey][4]

        rec = {'name': forced_name,
               'dr12srcid': forced_srcid,  # 64-bit integer!
               'ra': src['ra'], 'dec': src['dec'], 'radec_err': src['radec_err'],

               'det_like': src['det_like'],

               'src_id': ikey,
               'ml_bkg': src['ml_bkg'],
               'ml_exp': src['ml_exp'],

               'ml_cts': src['ml_cts'],
               'ml_cts_err': src['ml_cts_err'],
               'ml_cts_lowerr': src['ml_cts_lowerr'],
               'ml_cts_uperr': src['ml_cts_uperr'],

               'ml_rate': src['ml_rate'],
               'ml_rate_err': src['ml_rate_err'],
               'ml_rate_lowerr': src['ml_rate_lowerr'],
               'ml_rate_uperr': src['ml_rate_uperr'],

               'ml_flux': src['ml_flux'],
               'ml_flux_err': src['ml_flux_err'],
               'ml_flux_lowerr': src['ml_flux_lowerr'],
               'ml_flux_uperr': src['ml_flux_uperr'],

               'ape_cts': src['ape_cts'],
               'ape_bkg': src['ape_bkg'],
               'ape_exp': src['ape_exp'],
               'ape_radius': src['ape_radius'],
               'ape_pois': src['ape_pois']}

        # per-band values, e.g. rec['en01_dl'] = en01cat[key]['det_like'];
        # None when the source is absent from that band's catalog
        for band, cat in bandcats.items():
            present = key in cat
            for suffix, field in band_fields.items():
                rec['{}_{}'.format(band, suffix)] = cat[key][field] if present else None

        catsel.append(rec)
        selected_count = selected_count + 1

    print("selected={} at expcut={}".format(selected_count, expcut))
    with open(outfile, 'wb') as f:
        pickle.dump(catsel, f)
|
|
|
|
|
|
def final_xmm_catalog(infile=None, outfile_fits=None, expcut=100):
    """Build the final 4XMM-DR12 forced-photometry FITS catalogue.

    Reads the pickled per-source record list produced by the forced-photometry
    step, flags confused sources, filters on total-band aperture exposure and
    writes a BinTable FITS file sorted by RA, with per-band columns for the
    five energy bands (0=total, 1..4 = en01/en02/en03/en06).

    Parameters
    ----------
    infile : str
        Pickle file with the list of per-source record dictionaries.
    outfile_fits : str
        Output FITS catalogue path (overwritten if present).
    expcut : float
        Minimum total-band aperture exposure for a source to be kept.
    """
    with open(infile, 'rb') as f:
        table = pickle.load(f)

    # Record-key prefixes for the five bands 0..4; band 0 (total) uses the
    # bare key names, bands 1..4 map to the en01/en02/en03/en06 keys.
    prefixes = ['', 'en01_', 'en02_', 'en03_', 'en06_']

    # Quantities in output-column order; each expands to 5 banded columns.
    quantities = ['det_like',
                  'ml_rate', 'ml_rate_err',
                  'ml_cts', 'ml_cts_err',
                  'ml_flux', 'ml_flux_err',
                  'ml_exp', 'ml_bkg',
                  'ape_cts', 'ape_exp', 'ape_bkg', 'ape_radius', 'ape_pois']

    # FITS column base name, TFORM and TUNIT for each quantity.
    colspec = {
        'det_like':    ('DET_LIKE',    'E', ''),
        'ml_rate':     ('ML_RATE',     'E', 'cts/s'),
        'ml_rate_err': ('ML_RATE_ERR', 'E', 'cts/s'),
        'ml_cts':      ('ML_CTS',      'E', 'cts'),
        'ml_cts_err':  ('ML_CTS_ERR',  'E', 'cts'),
        'ml_flux':     ('ML_FLUX',     'E', 'erg/cm**2/s'),
        'ml_flux_err': ('ML_FLUX_ERR', 'E', 'erg/cm**2/s'),
        'ml_exp':      ('ML_EXP',      'E', 's'),
        'ml_bkg':      ('ML_BKG',      'E', 'cts/arcmin**2'),
        'ape_cts':     ('APE_CTS',     'I', 'count'),
        'ape_exp':     ('APE_EXP',     'E', 's'),
        'ape_bkg':     ('APE_BKG',     'E', 'count'),
        'ape_radius':  ('APE_RADIUS',  'E', 'pixel'),
        'ape_pois':    ('APE_POIS',    'E', ''),
    }

    def _rec_key(quantity, band):
        """Map (quantity, band) to the key used in the pickled records."""
        if band == 0:
            return quantity
        prefix = prefixes[band]
        if quantity == 'det_like':
            return prefix + 'dl'
        if quantity in ('ml_flux', 'ml_flux_err'):
            # banded flux keys drop the 'ml_' part: e.g. en01_flux(_err)
            return prefix + quantity[3:]
        return prefix + quantity

    # Sort NaN-free sources by total-band flux, brightest first.
    table_sorted = sorted(
        (rec for rec in table if not np.isnan(rec['ml_flux'])),
        key=lambda rec: rec['ml_flux'], reverse=True)

    # --- identify confused sources ---
    conf = {}
    conf_flux1 = 2e-14   # neighbour brighter than this confuses within 60"
    conf_rad1 = 60.0
    conf_flux2 = 9e-13   # very bright neighbours confuse out to 120"
    conf_rad2 = 120.0
    manual_conf = ['4XMM J021831.8-041403', '4XMM J021833.6-053759',
                   '4XMM J021927.4-040653', '4XMM J022148.7-035620']

    # Precompute one SkyCoord per source instead of re-creating them inside
    # the O(n^2) pair loop below (SkyCoord construction is expensive).
    coords = {rec['dr12srcid']: SkyCoord(rec['ra'], rec['dec'],
                                         frame=FK5(), unit="deg")
              for rec in table_sorted}

    print("Start confusion lookup")
    for rec1 in table_sorted:
        if not (rec1['ape_exp'] > expcut):
            continue
        key1 = rec1['dr12srcid']
        if rec1['name'] in manual_conf:
            print("MANUAL confusion {} flux={}".format(rec1['name'], rec1['ml_flux']))
            conf[key1] = True
            continue
        s1_crd = coords[key1]
        for rec2 in table_sorted:
            key2 = rec2['dr12srcid']
            # Only brighter, well-detected, well-exposed neighbours that are
            # not themselves already flagged can confuse this source.
            if not (rec2['ape_exp'] > expcut and rec2['det_like'] > 10
                    and rec1['ml_flux'] < rec2['ml_flux']
                    and key2 not in conf):
                continue
            if key1 == key2:
                continue
            if rec2['ml_flux'] > conf_flux1:
                if s1_crd.separation(coords[key2]).arcsec < conf_rad1:
                    conf[key1] = True
                    break  # one shot is enough
            # take very bright sources as special case
            if rec2['ml_flux'] > conf_flux2:
                if s1_crd.separation(coords[key2]).arcsec < conf_rad2:
                    conf[key1] = True
                    break  # one shot is enough
    print("Stop confusion lookup")

    # --- accumulate output columns for sources above the exposure cut ---
    name, dr12srcid, ra, dec, radec_err, confusion = [], [], [], [], [], []
    data = {(q, b): [] for q in quantities for b in range(len(prefixes))}

    for rec in table:
        if rec['ape_exp'] < expcut:
            continue
        name.append(rec['name'])
        dr12srcid.append(rec['dr12srcid'])
        ra.append(rec['ra'])
        dec.append(rec['dec'])
        radec_err.append(rec['radec_err'])
        confusion.append(1 if rec['dr12srcid'] in conf else 0)
        for q in quantities:
            for b in range(len(prefixes)):
                val = rec[_rec_key(q, b)]
                if q == 'ape_pois':
                    # Guard the positivity test against missing (None) band
                    # values: 'None > 0.0' raises TypeError in Python 3.
                    val = val if (val is not None and val > 0.0) else None
                data[(q, b)].append(val)

    # Sort output catalogue by RA.
    order = sorted(range(len(ra)), key=lambda index: ra[index])

    columns = [
        fits.Column(name='DR12_IAU_NAME', format='21A', array=[name[i] for i in order]),
        fits.Column(name='DR12_SRCID', format='K', array=[dr12srcid[i] for i in order]),
        fits.Column(name='DR12_RA', format='D', unit='deg', array=[ra[i] for i in order]),
        fits.Column(name='DR12_DEC', format='D', unit='deg', array=[dec[i] for i in order]),
        fits.Column(name='DR12_RADEC_ERR', format='E', unit='arcsec', array=[radec_err[i] for i in order]),
        fits.Column(name='CONF', format='I', unit='', array=[confusion[i] for i in order]),
    ]
    for q in quantities:
        base, fmt, unit = colspec[q]
        for b in range(len(prefixes)):
            columns.append(fits.Column(
                name='{}_{}'.format(base, b), format=fmt, unit=unit,
                array=[data[(q, b)][i] for i in order]))

    hdu = fits.BinTableHDU.from_columns(fits.ColDefs(columns), name='CATALOG')
    hdu.header['MISSION'] = ('SRG', 'SPECTRUM-RG')
    hdu.header['TELESCOP'] = ('eROSITA', '')
    hdu.header['INSTITUT'] = ('IKI', 'Affiliation')
    hdu.header['AUTHOR'] = ('Roman Krivonos', 'Responsible person')
    hdu.header['EMAIL'] = ('krivonos@cosmos.ru', 'E-mail')
    print(hdu.columns)
    hdu.writeto(outfile_fits, overwrite=True)

    # add_checksum() has to operate on the file as written, hence the re-open.
    with fits.open(outfile_fits, mode='update') as hdus:
        hdus[1].add_checksum()
|
|
|
|
|
|
def dr12_stat(infile=None, xmmslim=None, xmmlim=None, outfile_reg=None):
    """Print statistics of the forced DR12 photometry and write a region file.

    Selects forced eUDS detections with DET_LIKE_0 > 10 (optionally also
    above the DR12 flux limit *xmmlim*), writes them to a ds9 region file
    and reports the mean 4XMM flux error of the selection.

    Parameters
    ----------
    infile : str
        Forced eUDS photometry of 4XMM-DR12 sources (FITS).
    xmmslim : str
        4XMM-DR12 slim (unique-source) catalogue (FITS).
    xmmlim : float or None
        Optional lower limit on the converted DR12 flux.
    outfile_reg : str
        Output ds9 region file.
    """
    if not os.path.isfile(infile):
        print("File not found {}".format(infile))
        sys.exit()

    # Read slim DR12 catalog, converting fluxes to the eROSITA band.
    xcat = {}
    with fits.open(xmmslim) as hdul:
        xtab_slim = hdul[1].data
    for rec in xtab_slim:
        key = rec['SRCID']
        flux, error = convert_dr12_to_erosita_flux(rec)
        xcat[key] = {'name': rec['IAUNAME'],
                     'srcid': rec['SRCID'],
                     'flux': flux,
                     'flux_err': error,
                     'confused': rec['CONFUSED'],
                     'ra': rec['SC_RA'],
                     'dec': rec['SC_DEC'],
                     'ext': rec['SC_EXTENT'],
                     'ext_err': rec['SC_EXT_ERR'],
                     'ext_like': rec['SC_EXT_ML'],
                     'ndet': rec['N_DETECTIONS']}

    # Read forced eUDS photometry of 4XMM-DR12.
    with fits.open(infile) as hdul:
        tab = hdul[1].data
    print("Reading {} nrows={}\n\n".format(infile, len(tab)))

    mean_flux_err = 0.0
    ntotal = 0
    # 'with' guarantees the region file is closed even if a record is bad.
    with open(outfile_reg, "w") as freg:
        for rec in tab:
            key = rec['DR12_SRCID']

            if not (rec['DET_LIKE_0'] > 10):
                continue

            if xmmlim:
                if not (xcat[key]['flux'] > xmmlim):
                    continue

            freg.write("fk5;point({:.4f} {:.4f}) # text={{{}}}\n".format(xcat[key]['ra'], xcat[key]['dec'], xcat[key]['name'].replace('4XMM', '')))

            print("ext_like {} ext {} +/- {}".format(xcat[key]['ext_like'], float(xcat[key]['ext']), float(xcat[key]['ext_err'])))
            mean_flux_err += xcat[key]['flux_err']
            ntotal = ntotal + 1

    # Guard against division by zero when nothing passes the cuts.
    if ntotal > 0:
        mean_flux_err = mean_flux_err / float(ntotal) / 1e-15
    print("Selected {} above flux lim={}, mean XMM flux err {:.2f} (x1e-15)".format(ntotal, xmmlim, mean_flux_err))
|
|
|
|
|
|
def final_xmm_xmatch(infile=None, xmmslim=None, xmmfull=None, outfile_flux=None, xmmlim=None):
    """Cross-match the forced eUDS photometry with the 4XMM-DR12 catalogues.

    Writes TeX tables and ds9 region files of sources that brightened
    (eUDS/DR12 flux ratio > 10) or faded, and a CSV of eUDS/DR12 flux pairs
    for non-confused sources.

    Parameters
    ----------
    infile : str
        Forced eUDS photometry of 4XMM-DR12 sources (FITS).
    xmmslim : str
        4XMM-DR12 slim (unique-source) catalogue (FITS).
    xmmfull : str
        4XMM-DR12 full (per-detection) catalogue (FITS), used for the
        detection history in the TeX tables.
    outfile_flux : str
        Output CSV with flux pairs and ratios.
    xmmlim : float or None
        Optional lower limit on the converted DR12 flux.
    """
    flux_scale = 1e-15

    if not os.path.isfile(infile):
        print("File not found {}".format(infile))
        # Bail out here: the original fell through and crashed later in
        # fits.open(); exit for consistency with dr12_stat().
        sys.exit()

    # Read full DR12 catalog (one record per detection).
    xdr12 = {}
    with fits.open(xmmfull) as hdul:
        xtab_full = hdul[1].data

    for rec in xtab_full:
        key = rec['DETID']
        flux, error = convert_dr12_to_erosita_flux(rec)
        # Mid-point of the observation, used as the detection epoch.
        mjd = rec['MJD_START'] + (rec['MJD_STOP'] - rec['MJD_START'])/2
        t = Time(mjd, format='mjd')
        xdr12[key] = {'name': rec['IAUNAME'],
                      'srcid': rec['SRCID'],
                      'flux': flux,
                      'flux_err': error,
                      'confused': rec['CONFUSED'],
                      'mjd': mjd,
                      'date': t.to_value('iso', subfmt='date'),
                      'ndet': rec['N_DETECTIONS']}

    # Read slim DR12 catalog (one record per unique source).
    xcat = {}
    with fits.open(xmmslim) as hdul:
        xtab_slim = hdul[1].data

    for rec in xtab_slim:
        key = rec['SRCID']
        flux, error = convert_dr12_to_erosita_flux(rec)
        xcat[key] = {'name': rec['IAUNAME'],
                     'srcid': rec['SRCID'],
                     'flux': flux,
                     'flux_err': error,
                     'confused': rec['CONFUSED'],
                     'ra': rec['SC_RA'],
                     'dec': rec['SC_DEC'],
                     'ndet': rec['N_DETECTIONS']}

    # Read forced eUDS photometry of 4XMM-DR12.
    ecat = []
    with fits.open(infile) as hdul:
        tab = hdul[1].data
    print("reading {} nrows={}\n\n".format(infile, len(tab)))

    ratio = []
    var10up = []
    ftex = open(infile.replace(".fits", ".ratio10up.tex"), "w")
    freg = open(infile.replace(".fits", ".ratio10up.reg"), "w")
    for rec in tab:
        key = rec['DR12_SRCID']
        if not (rec['DET_LIKE_0'] > 10):
            continue

        if xmmlim:
            if not (xcat[key]['flux'] > xmmlim):
                continue

        rat = rec['ML_FLUX_0']/xcat[key]['flux']
        if rec['CONF'] == 0:
            ratio.append(rat)
            ecat.append({'euds_det_like': rec['DET_LIKE_0'], 'euds_flux': rec['ML_FLUX_0'], 'euds_flux_err': rec['ML_FLUX_ERR_0'],
                         'dr12_flux': xcat[key]['flux'], 'dr12_flux_err': xcat[key]['flux_err'], 'ratio': rat})

        # NOTE(review): nesting reconstructed from a whitespace-mangled
        # source -- brightened sources are reported regardless of the CONF
        # flag here; confirm against the original file.
        if rat > 10.0:
            var10up.append({'euds_det_like': rec['DET_LIKE_0'], 'euds_flux': rec['ML_FLUX_0'], 'euds_flux_err': rec['ML_FLUX_ERR_0'],
                            'dr12_flux': xcat[key]['flux'], 'dr12_flux_err': xcat[key]['flux_err'], 'ratio': rat})

            freg.write("fk5;point({:.4f} {:.4f}) # text={{{}}}\n".format(xcat[key]['ra'], xcat[key]['dec'], xcat[key]['name'].replace('4XMM', '')))

            ftex.write("\\hline {} & {:.2f} $\\pm$ {:.2f} & {:.2f} $\\pm$ {:.2f} & {:.2f} & {} & & \\\\\n".format(xcat[key]['name'].replace('4XMM', ''),
                                                                                                                  xcat[key]['flux']/flux_scale,
                                                                                                                  xcat[key]['flux_err']/flux_scale,
                                                                                                                  rec['ML_FLUX_0']/flux_scale,
                                                                                                                  rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                                                  rat, xcat[key]['ndet']
                                                                                                                  ))
            # add history of 4XMM detections
            for kk in xdr12.keys():
                if xdr12[kk]['srcid'] == key:
                    ftex.write(" & {:.2f} $\\pm$ {:.2f} & & & & {} & \\\\\n".format(xdr12[kk]['flux']/flux_scale,
                                                                                    xdr12[kk]['flux_err']/flux_scale,
                                                                                    xdr12[kk]['date']))
    ftex.close()
    freg.close()

    with open(outfile_flux, 'w') as csvfile:
        fieldnames = ['euds_det_like', 'euds_flux', 'euds_flux_err', 'dr12_flux', 'dr12_flux_err', 'ratio']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for rec in ecat:
            writer.writerow(rec)
    print("\n\n Selected {}".format(len(ecat)))

    # Find faded 4XMM sources.
    ftex = open(infile.replace(".fits", ".ratio10down.tex"), "w")
    freg = open(infile.replace(".fits", ".ratio10down.reg"), "w")
    for rec in tab:
        key = rec['DR12_SRCID']
        rat = xcat[key]['flux']/rec['ML_FLUX_0']
        if xcat[key]['flux'] > 1e-13 and rec['ML_FLUX_0'] < 2e-14:
            print("***: 4XMM {} {:.4f} eUDS {:.4f} +/- {:.4f} ratio {:.2f}".format(xcat[key]['name'].replace('4XMM', ''),
                                                                                   xcat[key]['flux']/flux_scale,
                                                                                   rec['ML_FLUX_0']/flux_scale,
                                                                                   rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                   rat))
        if not (rec['ML_EXP_0'] > 600):
            continue
        if not (xcat[key]['flux'] > 1e-14):
            continue
        if not (rat > 9 or np.isnan(rat)):
            continue
        print("down: 4XMM {} {:.4f} eUDS {:.4f} +/- {:.4f} ratio {:.2f}".format(xcat[key]['name'].replace('4XMM', ''),
                                                                                xcat[key]['flux']/flux_scale,
                                                                                rec['ML_FLUX_0']/flux_scale,
                                                                                rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                rat))
        freg.write("fk5;point({:.4f} {:.4f}) # text={{{} {:.1f}}}\n".format(xcat[key]['ra'], xcat[key]['dec'], xcat[key]['name'].replace('4XMM', ''), rat))
        ftex.write("\\hline {} & {:.2f} $\\pm$ {:.2f} & $<$ {:.2f} & {} & & \\\\\n".format(xcat[key]['name'].replace('4XMM', ''),
                                                                                           xcat[key]['flux']/flux_scale,
                                                                                           xcat[key]['flux_err']/flux_scale,
                                                                                           rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                           xcat[key]['ndet']))
        # add history of 4XMM detections
        for kk in xdr12.keys():
            if xdr12[kk]['srcid'] == key:
                ftex.write(" & {:.2f} $\\pm$ {:.2f} & & & & {} & \\\\\n".format(xdr12[kk]['flux']/flux_scale,
                                                                                xdr12[kk]['flux_err']/flux_scale,
                                                                                xdr12[kk]['date']))

    ftex.close()
    freg.close()
    return
|
|
|
|
|
|
|
|
def make_xmm_ds9reg_confused(infile=None, outfile=None, scale=60*60):
    """Write a ds9 region file with circles around confused (CONF==1) sources.

    Parameters
    ----------
    infile : str
        Final XMM forced-photometry catalogue (FITS). If the file does not
        exist the function silently does nothing (original behaviour kept).
    outfile : str
        Output ds9 region file.
    scale : float
        Divisor applied to DR12_RADEC_ERR to get the circle radius
        (default 3600, i.e. arcsec -> degrees for fk5 regions).
    """
    if os.path.isfile(infile):
        # Context managers close both the FITS file (leaked before) and
        # the region file even on error.
        with fits.open(infile) as hdul:
            tbdata = hdul[1].data
            with open(outfile, 'w') as writer:
                for rec in tbdata:
                    if rec['conf'] == 1:
                        writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['dr12_ra'], rec['dr12_dec'], rec['dr12_radec_err']/scale, rec['dr12_iau_name']))
|
|
|
|
def cross_check_euds(infile=None, euds=None, outkey=None, dlmin=10, devmax=30):
    """Cross-check an external source list against the eUDS catalogue.

    Sources from *infile* with DET_LIKE_0 > dlmin are positionally matched
    to eUDS sources within *devmax* arcsec. Matched and missed sources are
    written to ds9 region files and a TeX table keyed by *outkey*.

    Parameters
    ----------
    infile : str
        External catalogue to check (FITS).
    euds : str
        eUDS catalogue (FITS).
    outkey : str
        Prefix for the output .tex/.reg files.
    dlmin : float
        Minimum DET_LIKE_0 for a source to be considered.
    devmax : float
        Maximum positional deviation (arcsec) for a match.
    """
    print("Reading {}".format(euds))
    flux_scale = 1e-15

    # Read eUDS catalog.
    ecat = {}
    with fits.open(euds) as hdul:
        etab = hdul[1].data

    for rec in etab:
        key = rec['NAME']
        ecat[key] = {'ra': rec['RA'], 'dec': rec['DEC'], 'flux': rec['ML_FLUX'], 'flux_err': rec['ML_FLUX_ERR']}

    # Build the eUDS SkyCoords once: constructing them inside the per-source
    # loop below (as before) repeats expensive work O(n*m) times.
    euds_crds = {kk: SkyCoord(ecat[kk]['ra'], ecat[kk]['dec'], frame=FK5(), unit="deg")
                 for kk in ecat.keys()}

    print("Reading {}".format(infile))

    with fits.open(infile) as hdul:
        tab = hdul[1].data

    count = 0
    ftex = open("{}.tex".format(outkey), "w")
    freg = open("{}.reg".format(outkey), "w")
    freg_missed = open("{}.missed.reg".format(outkey), "w")
    freg_found = open("{}.found.reg".format(outkey), "w")
    for rec in tab:
        det_like = rec['DET_LIKE_0']
        if not (det_like > dlmin):
            continue
        count = count + 1
        ra = rec['RA']
        dec = rec['DEC']
        name = make_source_name(ra, dec, key='')
        check_crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        freg.write("fk5;point({:.4f} {:.4f}) # text={{{} {:.1f}}}\n".format(ra, dec, name, det_like))

        found = False
        for kk in ecat.keys():
            sep = check_crd.separation(euds_crds[kk]).arcsec
            if sep < devmax:
                print("{} {} {}".format(name, kk, sep))
                found = True
                # Flux "hardness ratio" between the two catalogues and its
                # propagated error.
                f1 = rec['ML_FLUX_0']
                f2 = ecat[kk]['flux']
                hr = (f1 - f2)/(f1 + f2)
                e1 = rec['ML_FLUX_ERR_0']
                e2 = ecat[kk]['flux_err']
                err_1 = np.sqrt(e1*e1 + e2*e2)/(f1 - f2)  # fractional error of nominator
                err_2 = np.sqrt(e1*e1 + e2*e2)/(f1 + f2)  # fractional error of denominator
                hr_err = np.sqrt(err_1**2 + err_2**2)*np.abs(hr)
                freg_found.write("fk5;point({:.4f} {:.4f}) # text={{{} {:.1f}}}\n".format(ra, dec, name, det_like))
                ftex.write("{:.2f} {} & {:.5f} & {:.5f} & {:.2f} & {:.2f} $\\pm$ {:.2f} & {:.2f} $\\pm$ {:.2f} & {:.2f} $\\pm$ {:.2f} \\\\\n".format(rec['DET_LIKE_0'],
                                                                                                                                                     name, ra, dec, rec['DET_LIKE_0'],
                                                                                                                                                     rec['ML_FLUX_0']/flux_scale,
                                                                                                                                                     rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                                                                                     ecat[kk]['flux']/flux_scale,
                                                                                                                                                     ecat[kk]['flux_err']/flux_scale,
                                                                                                                                                     hr, hr_err))

        if not found:
            print("Not found {}".format(name))
            freg_missed.write("fk5;point({:.4f} {:.4f}) # text={{{} {:.1f}}}\n".format(ra, dec, name, det_like))
            ftex.write("{:.2f} {} & {:.5f} & {:.5f} & {:.2f} & {:.2f} $\\pm$ {:.2f} & {:.2f} $\\pm$ {:.2f} & & \\\\\n".format(rec['DET_LIKE_0'], name, ra, dec, rec['DET_LIKE_0'],
                                                                                                                              rec['ML_FLUX_0']/flux_scale,
                                                                                                                              rec['ML_FLUX_ERR_0']/flux_scale,
                                                                                                                              0.0/flux_scale,
                                                                                                                              0.0/flux_scale))

    print(count)
    freg.close()
    freg_missed.close()
    freg_found.close()
    ftex.close()
|
|
|
|
|
|
|
|
def make_euds_stat(infile=None, fluxlim=None):
    """Print simple statistics of the eUDS catalogue.

    Counts extended (EXT_LIKE > 5) versus point-like sources, and the number
    of point-like sources brighter than *fluxlim* (if given).

    Parameters
    ----------
    infile : str
        eUDS catalogue (FITS).
    fluxlim : float or None
        Optional lower ML_FLUX limit for the "Selected" count.
    """
    print("Reading {}".format(infile))

    # Read eUDS catalog.
    hdul = fits.open(infile)
    etab = hdul[1].data
    hdul.close()

    print("Total {}".format(len(etab)))

    extended = 0
    point = 0
    ntotal = 0
    for rec in etab:
        if rec['EXT_LIKE'] > 5.0:
            extended = extended + 1
        else:
            point = point + 1
        if fluxlim:
            if not (rec['ML_FLUX'] > fluxlim):
                continue
        # Extended sources are excluded from the flux-limited selection.
        if rec['EXT_LIKE'] > 5.0:
            continue
        ntotal = ntotal + 1
    print("Extended {}".format(extended))
    print("Point {}".format(point))
    print("Selected {} above {}".format(ntotal, fluxlim))
|
|
|
|
def make_euds_cds(infile=None, outfile=None, fluxlim=None):
    """Dump the eUDS catalogue as a fixed-width ASCII table for CDS.

    Obsolete, see makecds.py, which uses the cdspyreadme package.
    Negative sentinel values in the per-band columns are clipped to 0.0;
    a 'None' DR12 name becomes an empty field. *fluxlim* is accepted for
    interface compatibility but not used.

    Parameters
    ----------
    infile : str
        eUDS catalogue (FITS).
    outfile : str
        Output fixed-width ASCII file.
    fluxlim : unused
        Kept for signature compatibility with the other make_* helpers.
    """
    print("Reading {}".format(infile))

    # Read eUDS catalog.
    hdul = fits.open(infile)
    etab = hdul[1].data
    hdul.close()

    print("Total {}".format(len(etab)))

    # 'with' ensures the ASCII file is flushed/closed even on a bad record.
    with open(outfile, 'w') as f:
        for r in etab:
            f.write("{0:3d} {1} {2:10.6f} {3:10.6f} {4:6.3f} {5:5.2f} {6:5.2f} {7:10.6f} {8:12.6f} {9:8.6f} {10:8.6f} {11:11.6f} {12:10.6f} {13:12.6e} {14:12.6e} {15:10.6e} {16:12.6f} {17:>21} {18:12.6f} {19:8.6f} {20:8.6f} {21:11.6f} {22:10.6f} {23:12.6e} {24:12.6e} {25:10.6e} {26:12.6f} {27:12.6f} {28:8.6f} {29:8.6f} {30:11.6f} {31:10.6f} {32:12.6e} {33:12.6e} {34:10.6e} {35:12.6f} {36:12.6f} {37:8.6f} {38:8.6f} {39:11.6f} {40:10.6f} {41:12.6e} {42:12.6e} {43:10.6e} {44:12.6f} {45:12.6f} {46:8.6f} {47:8.6f} {48:11.6f} {49:10.6f} {50:12.6e} {51:12.6e} {52:10.6e} {53:12.6f}\n".format(
                r['ID_SRC'],  # 0
                r['NAME'],  # 1
                r['RA'],  # 2
                r['DEC'],  # 3
                r['RADEC_ERR'],  # 4 Positional error (68% confidence)
                r['EXT'],  # 5 Source extent
                r['EXT_ERR'] if (r['EXT_ERR'] > 0.0) else 0.0,  # 6 Extent uncertainty
                r['EXT_LIKE'],  # 7
                r['DET_LIKE'],  # 8
                r['ML_RATE'],  # 9
                r['ML_RATE_ERR'],  # 10
                r['ML_CTS'],  # 11
                r['ML_CTS_ERR'],  # 12
                r['ML_FLUX'],  # 13
                r['ML_FLUX_ERR'],  # 14
                r['ML_EXP'],  # 15
                r['ML_BKG'],  # 16
                r['DR12_IAU_NAME'] if (r['DR12_IAU_NAME'] != 'None') else '',  # 17 4XMM J021738.8-051257

                r['DET_LIKE_1'] if (r['DET_LIKE_1'] > 0.0) else 0.0,  # 18
                r['ML_RATE_1'] if (r['ML_RATE_1'] > 0.0) else 0.0,  # 19
                r['ML_RATE_ERR_1'] if (r['ML_RATE_ERR_1'] > 0.0) else 0.0,  # 20
                r['ML_CTS_1'] if (r['ML_CTS_1'] > 0.0) else 0.0,  # 21
                r['ML_CTS_ERR_1'] if (r['ML_CTS_ERR_1'] > 0.0) else 0.0,  # 22
                r['ML_FLUX_1'] if (r['ML_FLUX_1'] > 0.0) else 0.0,  # 23
                r['ML_FLUX_ERR_1'] if (r['ML_FLUX_ERR_1'] > 0.0) else 0.0,  # 24
                r['ML_EXP_1'] if (r['ML_EXP_1'] > 0.0) else 0.0,  # 25
                r['ML_BKG_1'] if (r['ML_BKG_1'] > 0.0) else 0.0,  # 26

                r['DET_LIKE_2'] if (r['DET_LIKE_2'] > 0.0) else 0.0,  # 27
                r['ML_RATE_2'] if (r['ML_RATE_2'] > 0.0) else 0.0,  # 28
                r['ML_RATE_ERR_2'] if (r['ML_RATE_ERR_2'] > 0.0) else 0.0,  # 29
                r['ML_CTS_2'] if (r['ML_CTS_2'] > 0.0) else 0.0,  # 30
                r['ML_CTS_ERR_2'] if (r['ML_CTS_ERR_2'] > 0.0) else 0.0,  # 31
                r['ML_FLUX_2'] if (r['ML_FLUX_2'] > 0.0) else 0.0,  # 32
                r['ML_FLUX_ERR_2'] if (r['ML_FLUX_ERR_2'] > 0.0) else 0.0,  # 33
                r['ML_EXP_2'] if (r['ML_EXP_2'] > 0.0) else 0.0,  # 34
                r['ML_BKG_2'] if (r['ML_BKG_2'] > 0.0) else 0.0,  # 35

                r['DET_LIKE_3'] if (r['DET_LIKE_3'] > 0.0) else 0.0,  # 36
                r['ML_RATE_3'] if (r['ML_RATE_3'] > 0.0) else 0.0,  # 37
                r['ML_RATE_ERR_3'] if (r['ML_RATE_ERR_3'] > 0.0) else 0.0,  # 38
                r['ML_CTS_3'] if (r['ML_CTS_3'] > 0.0) else 0.0,  # 39
                r['ML_CTS_ERR_3'] if (r['ML_CTS_ERR_3'] > 0.0) else 0.0,  # 40
                r['ML_FLUX_3'] if (r['ML_FLUX_3'] > 0.0) else 0.0,  # 41
                r['ML_FLUX_ERR_3'] if (r['ML_FLUX_ERR_3'] > 0.0) else 0.0,  # 42
                r['ML_EXP_3'] if (r['ML_EXP_3'] > 0.0) else 0.0,  # 43
                r['ML_BKG_3'] if (r['ML_BKG_3'] > 0.0) else 0.0,  # 44

                r['DET_LIKE_4'] if (r['DET_LIKE_4'] > 0.0) else 0.0,  # 45
                r['ML_RATE_4'] if (r['ML_RATE_4'] > 0.0) else 0.0,  # 46
                r['ML_RATE_ERR_4'] if (r['ML_RATE_ERR_4'] > 0.0) else 0.0,  # 47
                r['ML_CTS_4'] if (r['ML_CTS_4'] > 0.0) else 0.0,  # 48
                r['ML_CTS_ERR_4'] if (r['ML_CTS_ERR_4'] > 0.0) else 0.0,  # 49
                r['ML_FLUX_4'] if (r['ML_FLUX_4'] > 0.0) else 0.0,  # 50
                r['ML_FLUX_ERR_4'] if (r['ML_FLUX_ERR_4'] > 0.0) else 0.0,  # 51
                r['ML_EXP_4'] if (r['ML_EXP_4'] > 0.0) else 0.0,  # 52
                r['ML_BKG_4'] if (r['ML_BKG_4'] > 0.0) else 0.0,  # 53
            ))
|
|
|
|
|
|
|
|
def make_xmm_cds(infile=None,outfile=None,fluxlim=None):
    """ Convert an XMM (4XMM-DR12 cross-match) FITS catalog to a CDS-style ASCII table.

    Reads the binary table from HDU 1 of *infile* and writes one fixed-width
    text row per source to *outfile*.  Negative values in the likelihood /
    rate / counts / flux / exposure / background columns (used in the catalog
    to flag non-detections) are clipped to zero before formatting.

    Parameters
    ----------
    infile : str
        Input FITS catalog; the source table is expected in HDU 1.
    outfile : str
        Output ASCII file; overwritten if it already exists.
    fluxlim : float, optional
        Unused; kept for backward compatibility with existing callers.
    """
    print("Reading {}".format(infile))

    # Read the XMM catalog table. The context manager guarantees the FITS
    # file handle is released even if row formatting fails below.
    with fits.open(infile) as hdul:
        etab = hdul[1].data

    print("Total {}".format(len(etab)))

    def _clip(value):
        # Catalog stores non-detections as negative numbers; CDS table wants 0.
        return value if value > 0.0 else 0.0

    # Column families written per energy band 0..4, in output order
    # (fields 5..49 of the format string below).
    band_families = ('DET_LIKE', 'ML_RATE', 'ML_RATE_ERR', 'ML_CTS',
                     'ML_CTS_ERR', 'ML_FLUX', 'ML_FLUX_ERR', 'ML_EXP', 'ML_BKG')

    row_fmt = ("{0} {1} {2:10.6f} {3:10.6f} {4:6.3f} {5:12.6f} {6:12.6f} "
               "{7:12.6f} {8:12.6f} {9:12.6f} {10:10.6f} {11:10.6f} {12:10.6f} "
               "{13:10.6f} {14:10.6f} {15:10.6f} {16:10.6f} {17:10.6f} "
               "{18:10.6f} {19:10.6f} {20:11.6f} {21:11.6f} {22:11.6f} "
               "{23:11.6f} {24:11.6f} {25:12.6f} {26:12.6f} {27:12.6f} "
               "{28:12.6f} {29:12.6f} {30:12.6e} {31:12.6e} {32:12.6e} "
               "{33:12.6e} {34:12.6e} {35:12.6e} {36:12.6e} {37:12.6e} "
               "{38:12.6e} {39:12.6e} {40:10.6e} {41:10.6e} {42:10.6e} "
               "{43:10.6e} {44:10.6e} {45:12.6f} {46:12.6f} {47:12.6f} "
               "{48:12.6f} {49:12.6f} {50:01d}\n")

    with open(outfile, 'w') as f:
        for r in etab:
            # Fields 0-4: identification and position (written verbatim).
            values = [r['DR12_IAU_NAME'], r['DR12_SRCID'],
                      r['DR12_RA'], r['DR12_DEC'], r['DR12_RADEC_ERR']]
            # Fields 5-49: each family across the five bands, clipped at zero.
            values += [_clip(r['{}_{}'.format(family, band)])
                       for family in band_families for band in range(5)]
            # Field 50: confirmation flag, written verbatim.
            values.append(r['CONF'])
            f.write(row_fmt.format(*values))
|
|
|
|
def make_euds_cosmatch(infile=None,outfile=None):
    """ Add XMM-band flux columns to an eUDS-CosMatch catalog.

    Reads the catalog *infile*, converts the eROSITA 0.3-2.3 keV flux
    (``ML_FLUX``) and its error (``ML_FLUX_ERR``) into the eight standard
    XMM energy bands via fixed model-flux ratios, appends the resulting
    ``XMM_E*_FLUX`` / ``XMM_E*_FLUX_ERR`` columns, and writes the table
    to *outfile* as FITS (overwriting any existing file).

    XMM bands required for eUDS-CosMatch:
        E1 0.2-0.5 keV
        E2 0.5-1.0 keV
        E3 1.0-2.0 keV
        E4 2.0-4.5 keV
        E5 4.5-12  keV
        E6 0.2-12  keV
        E7 0.5-4.5 keV
        E8 0.5-2.0 keV

    Conversion model used for the flux ratios:
    ========================================================================
    Model wabs<1>*powerlaw<2>   Source No.: 1   Active/Off
    Model Model Component  Parameter  Unit     Value
     par  comp
       1    1   wabs       nH         10^22    2.00000E-02  +/-  0.0
       2    2   powerlaw   PhoIndex            2.00000      +/-  0.0
       3    2   powerlaw   norm                1.00000E-02  +/-  0.0
    ________________________________________________________________________

    eUDS bands (model fluxes; only the 0.3-2.3 keV value is used as the
    conversion reference, the others are listed for the record):
     Model Flux 0.023955   photons (2.9017e-11 ergs/cm^2/s) range (0.30000 - 2.3000 keV)
     Model Flux 0.012343   photons (8.432e-12  ergs/cm^2/s) range (0.30000 - 0.6000 keV)
     Model Flux 0.011613   photons (2.0585e-11 ergs/cm^2/s) range (0.60000 - 2.3000 keV)
     Model Flux 0.0023412  photons (1.241e-11  ergs/cm^2/s) range (2.3000 - 5.0000 keV)
     Model Flux 0.00074967 photons (7.5271e-12 ergs/cm^2/s) range (5.0000 - 8.0000 keV)

    XMM bands (model fluxes):
     Model Flux 0.015487  photons (8.3626e-12 ergs/cm^2/s) range (0.20000 - 0.5000 keV) E1
     Model Flux 0.0089234 photons (9.9868e-12 ergs/cm^2/s) range (0.50000 - 1.0000 keV) E2
     Model Flux 0.0048784 photons (1.0859e-11 ergs/cm^2/s) range (1.0000 - 2.0000 keV) E3
     Model Flux 0.0027667 photons (1.2947e-11 ergs/cm^2/s) range (2.0000 - 4.5000 keV) E4
     Model Flux 0.0013883 photons (1.5709e-11 ergs/cm^2/s) range (4.5000 - 12.000 keV) E5
     Model Flux 0.033444  photons (5.7864e-11 ergs/cm^2/s) range (0.20000 - 12.000 keV) E6
     Model Flux 0.016568  photons (3.3793e-11 ergs/cm^2/s) range (0.50000 - 4.5000 keV) E7
     Model Flux 0.013802  photons (2.0846e-11 ergs/cm^2/s) range (0.50000 - 2.0000 keV) E8
    """

    # Reference model flux in the eUDS detection band (ML_FLUX):
    euds_E0 = 2.9017e-11  # ergs/cm^2/s, range 0.3000 - 2.3000 keV

    # XMM bands: (band tag, description string, model flux in ergs/cm^2/s).
    # Order defines the output column order.
    xmm_bands = (
        ('E1', "0.2-0.5keV", 8.3626e-12),
        ('E2', "0.5-1.0keV", 9.9868e-12),
        ('E3', "1.0-2.0keV", 1.0859e-11),
        ('E4', "2.0-4.5keV", 1.2947e-11),
        ('E5', "4.5-12keV",  1.5709e-11),
        ('E6', "0.2-12keV",  5.7864e-11),
        ('E7', "0.5-4.5keV", 3.3793e-11),
        ('E8', "0.5-2.0keV", 2.0846e-11),
    )

    print("Reading {}".format(infile))

    t = Table.read(infile)

    # All flux columns first, then all error columns, matching the
    # historical column order of this catalog.
    for tag, desc, xmm_flux in xmm_bands:
        t.add_column(Column(name='XMM_{}_FLUX'.format(tag),
                            description=desc,
                            data=t['ML_FLUX'] * xmm_flux / euds_E0))

    for tag, desc, xmm_flux in xmm_bands:
        t.add_column(Column(name='XMM_{}_FLUX_ERR'.format(tag),
                            description=desc,
                            data=t['ML_FLUX_ERR'] * xmm_flux / euds_E0))

    t.write(outfile, format='fits', overwrite=True)
|
|
|
|
|