forked from erosita/uds
This commit is contained in:
Roman Krivonos 2025-04-21 12:44:25 +03:00
parent e5760e312f
commit 49d8e21c32
10 changed files with 2435 additions and 229 deletions

View File

@@ -10,6 +10,8 @@ from astropy.wcs import WCS
 from astropy.coordinates import SkyCoord  # High-level coordinates
 from astropy.coordinates import ICRS, Galactic, FK4, FK5  # Low-level frames
 from astropy.coordinates import Angle, Latitude, Longitude  # Angles
+from astropy.stats import sigma_clip
 import matplotlib.pyplot as plt
 from astropy.table import QTable, Table, Column
@@ -59,7 +61,7 @@ def pix2sky(wcs,dsrc):
     s['ra1']=sk[0][1]
     s['dec1']=sk[0][0]
-def plot_resid(dsrc,dref,crval1,crval2):
+def plot_resid(dsrc,dref,crval1,crval2, sigma=3, maxiters=10):
     # calculates RMS of original ra,dec
     center_crd = SkyCoord(crval1, crval2, frame=FK5(), unit="deg")
     offset=[]
@@ -72,6 +74,24 @@ def plot_resid(dsrc,dref,crval1,crval2):
         resid.append(src_crd.separation(ref_crd).arcsec)
         error.append(s['radec_err'])
+    data = np.array(resid)
+    filtered_data = sigma_clip(data, sigma=sigma, maxiters=maxiters, return_bounds=True)
+    filtered_arr=filtered_data[0]
+    filtered_min=filtered_data[1]
+    filtered_max=filtered_data[2]
+    print("***")
+    print("*** sigma clipping, sigma={}".format(sigma))
+    print("*** length orig: {} taken: {} filtered: {}".format(len(data),len(data[filtered_arr.mask==False]),len(data[filtered_arr.mask==True])))
+    print("*** min:{:.2f} max:{:.2f}".format(filtered_min,filtered_max))
+    print("***")
+    # mark sources as bad (skip=True)
+    for idx in range(len(resid)):
+        if(filtered_arr.mask[idx] == True):
+            dsrc[idx]['skip']=True
+            dref[idx]['skip']=True
+            #print("{:2f} {:2f}".format(resid[idx],filtered_arr[idx]))
     indices = sorted(
         range(len(offset)),
         key=lambda index: offset[index]
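The clipping added above relies on astropy.stats.sigma_clip returning a masked array plus the clipping bounds when return_bounds=True. A minimal standalone sketch (toy residuals, not part of the commit) of that behaviour:

    # Illustrative only: how sigma_clip with return_bounds=True behaves.
    import numpy as np
    from astropy.stats import sigma_clip

    resid = np.array([0.9, 1.1, 1.0, 0.8, 1.2, 1.0, 0.9, 1.1, 1.0, 12.0])  # arcsec
    clipped, lo, hi = sigma_clip(resid, sigma=3, maxiters=10, return_bounds=True)
    print(clipped.mask)          # True where a value was rejected (the 12.0 point)
    print(lo, hi)                # lower/upper bounds actually applied
    survivors = resid[~clipped.mask]   # these would keep skip=False above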
@@ -167,30 +187,26 @@ def get_rms1(dsrc,dref):
 def get_chi2_radec(dsrc,dref):
     # calculates RMS of original ra,dec
-    resid=0.0
-    n=0
+    chi2=0.0
     for idx, s in enumerate(dsrc):
         radec_err=s['radec_err']
         src_crd = SkyCoord(dsrc[idx]['ra'], dsrc[idx]['dec'], frame=FK5(), unit="deg")
         ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
         sep=src_crd.separation(ref_crd).arcsec
-        resid = resid+sep**2/radec_err**2
-        n=n+1
-    return resid
+        chi2 += sep**2/radec_err**2
+    return chi2
 def get_chi2_radec1(dsrc,dref):
     # calculates RMS of modified ra1,dec1
-    resid=0.0
-    n=0
+    chi2=0.0
     for idx, s in enumerate(dsrc):
         radec_err=s['radec_err']
         src_crd = SkyCoord(dsrc[idx]['ra1'], dsrc[idx]['dec1'], frame=FK5(), unit="deg")
         ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
         sep=src_crd.separation(ref_crd).arcsec
-        resid = resid+sep**2/radec_err**2
-        n=n+1
-    return resid
+        chi2 += sep**2/radec_err**2
+    return chi2
@@ -220,7 +236,7 @@ def do_transform_resid(params, wcs, dsrc, dref, crval1, crval2):
     return resid1
-def do_astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
+def astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
     """ calculates astronomy correction """
     log=catalog.replace(".fits", ".shift.log")
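The two chi2 helpers above loop over the source/reference pairs; the same statistic can be computed with array SkyCoord objects. A hedged, illustrative sketch (not part of the commit):

    # Vectorized version of the chi2 = sum(sep^2 / err^2) statistic.
    import numpy as np
    from astropy.coordinates import SkyCoord, FK5

    def chi2_radec_vec(dsrc, dref):
        src = SkyCoord([s['ra'] for s in dsrc], [s['dec'] for s in dsrc], frame=FK5(), unit="deg")
        ref = SkyCoord([r['ra'] for r in dref], [r['dec'] for r in dref], frame=FK5(), unit="deg")
        sep = src.separation(ref).arcsec                 # element-wise separations
        err = np.array([s['radec_err'] for s in dsrc])   # positional errors, arcsec
        return np.sum(sep**2 / err**2)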
@@ -235,7 +251,7 @@ def do_astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
     #png=png.replace(key,"{}_{}".format(obslist[key],key))
     t = Table(names=('rota', 'shift_ra','shift_dec','chi2',), dtype=('f8','f8','f8','f8'))
-    print(src)
+    print('src =',src)
     hdul = fits.open(src)
     src_map_data = hdul[0].data
     src_map_hdr = hdul[0].header
@@ -245,10 +261,10 @@ def do_astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
     dsrc=[]
     for rec in src_tab_data:
-        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'], radec_err = rec['RADEC_ERR'])
+        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'], radec_err = rec['RADEC_ERR'], skip=False)
         dsrc.append(d)
-    print(ref)
+    print('ref =',ref)
     hdul = fits.open(ref)
     ref_map_data = hdul[0].data
     ref_map_hdr = hdul[0].header
@@ -258,21 +274,39 @@ def do_astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
     dref=[]
     for rec in ref_tab_data:
-        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'])
+        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'], skip=False)
         dref.append(d)
-    rms = get_rms(dsrc,dref)
-    chi2_radec = get_chi2_radec(dsrc,dref)
     wcs_src=mWCS(src_map_hdr)
     wcs_src.info()
     crval1=wcs_src.get_crval1()
     crval2=wcs_src.get_crval2()
-    #print(crval1,crval2)
+    # calculate separations and mark sources to be removed from astrocorrection
     offset0, sep0, err0 = plot_resid(dsrc,dref,crval1,crval2)
+    print("*** Nsrc {} == {} Nref [before sigma_clip]".format(len(dsrc),len(dref)))
+    # delete outliers
+    while(True):
+        del_index=None
+        for idx, r in enumerate(dref):
+            if(r['skip']==True):
+                del_index=idx # find new index to remove
+                break
+        if(del_index):
+            del dref[del_index]
+            del dsrc[del_index]
+        else:
+            break
+    print("*** Nsrc {} == {} Nref [after sigma_clip]".format(len(dsrc),len(dref)))
+    print("***")
+    rms = get_rms(dsrc,dref)
+    chi2_radec = get_chi2_radec(dsrc,dref)
     with open(log0, 'w') as writer:
-        writer.write("label Y Separation, arcsec\nlabel X On-axis offset, arcmin\nr y -1 15\n")
+        writer.write("label Y Separation, arcsec\nlabel X Offset, arcmin\nr y -1 15\n")
         writer.write("ma 9 on\n")
         writer.write("read serr 2\n")
         for idx, s in enumerate(offset0):
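A compact, hypothetical equivalent of the outlier-removal loop added above (not part of the commit): keep only the pairs whose skip flag is still False. Note that a test like "if(del_index):" treats index 0 as falsy, so an explicit "is not None" check is the safer idiom when deleting by index.

    # Sketch only: filter flagged pairs in one pass instead of repeated deletes.
    pairs = [(s, r) for s, r in zip(dsrc, dref) if not (s['skip'] or r['skip'])]
    dsrc = [s for s, _ in pairs]
    dref = [r for _, r in pairs]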
@@ -350,16 +384,16 @@ def do_astro_corr(src=None, ref=None, catalog=None, Nsim=100, Rsim=10.0):
         writer.write("{:.2f} {:.2f} {:.2f}\n".format(offset1[idx],sep1[idx],err1[idx]))
     with open(log, 'w') as writer:
-        writer.write("{} {:+.2f} {:+.2f} {:+.2f} (chi2_orig:{:.2f} - chi2_opt:{:.2f}) =\t {:.2f} RMS_orig:{:.2f} (wgtd:{:.2f}) RMS_opt:{:.2f} (wgtd:{:.2f}) N={}\n".format(key,
+        writer.write("{} {:+.2f} {:+.2f} {:+.2f} (chi2_orig:{:.2f}-chi2_opt:{:.2f}) =\t {:.2f} RMS_orig:{:.2f} (wgtd:{:.2f}) RMS_opt:{:.2f} (wgtd:{:.2f}) N={}\n".format(key,
             rota_opt,
             shift_ra,
             shift_dec,
             get_chi2_radec(dsrc,dref),
             get_chi2_radec1(dsrc,dref),
             chi2_radec-chi2_radec1_opt,
             rms,get_rms_w(dsrc,dref),
             rms1,get_rms1_w(dsrc,dref),
             len(dsrc)))

View File

@@ -6,7 +6,7 @@
 """
-Координаты сентрального кадра, к которому будут
+Координаты центрального кадра, к которому будут
 приводиться изображения всех списков событий
 """
 ra_cen=194.9601418
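The Russian docstring above (the commit only fixes the spelling "сентрального" to "центрального") says: "Coordinates of the central frame to which the images of all event lists will be reprojected." Purely as an illustration, and with a hypothetical de_cen value (the real one is defined elsewhere in this config and is not shown in the diff), the center can be wrapped in an astropy SkyCoord:

    # Illustrative only; de_cen below is a placeholder, not the project's value.
    from astropy.coordinates import SkyCoord, FK5

    ra_cen = 194.9601418      # value from this config
    de_cen = 27.94            # hypothetical placeholder
    center = SkyCoord(ra_cen, de_cen, frame=FK5(), unit="deg")
    print(center.to_string('hmsdms'))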

View File

@@ -37,6 +37,7 @@ import shutil
 from multiprocessing import Pool
 from coma.astrometry import *
 from coma.config import ra_cen, de_cen, width
 MJDREF = 51543.875
@@ -107,7 +108,7 @@ def convert_dr12_to_erosita_flux(rec, field_prefix='SC_'):
 def do_evtool_esass(evfile=None,events=None,outfile=None,evlist=None,rusky=None,
                     gti=None,region=None,emin=None,emax=None, rmlock=False,
-                    do_center=False, width=1024, ra_cen=None, de_cen=None):
+                    do_center=False, width=1024, ra_cen=None, de_cen=None, local_run=True, cwd=None):
     eventfiles=None
     if(events):
@@ -149,10 +150,11 @@ def do_evtool_esass(evfile=None,events=None,outfile=None,evlist=None,rusky=None,
         "pattern=15"
     ]
-    print((" ").join(cmd))
-    test_exe('evtool')
-    os.system((" ").join(cmd))
+    #print((" ").join(cmd))
+    #test_exe('evtool')
+    #os.system((" ").join(cmd))
+    runme(cmd, local_run=local_run, cwd=cwd)
     if(rmlock==True):
         lockfiles=outfile.replace(".fits", "*.lock")
         for filename in glob.glob(lockfiles):
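runme(cmd, local_run=local_run, cwd=cwd) replaces the direct os.system call here and in init_events below, but its implementation is not part of this diff. The following is only a guess at the kind of wrapper it could be; the behaviour and file name are assumptions, not the project's actual code:

    # Hypothetical runme()-style wrapper (NOT the project's implementation).
    import subprocess

    def runme(cmd, local_run=True, cwd=None):
        line = " ".join(cmd)
        print(line)
        if local_run:
            # run the eSASS tool right away
            subprocess.run(line, shell=True, cwd=cwd, check=True)
        else:
            # otherwise just collect the command, e.g. for batch execution
            with open("commands.sh", "a") as f:
                f.write(line + "\n")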
@ -244,7 +246,7 @@ def do_badpix_tm6(filename):
f[4].append(data4) f[4].append(data4)
f.close() f.close()
def init_events(key=None, eband_selected=[0], eband_index=None, def init_events(key=None, eband_selected=[0], eband_index=None,local_run=True,cwd=None,
ra_cen=None, de_cen=None, width=1024, do_init=True,vign=True, ra_cen=None, de_cen=None, width=1024, do_init=True,vign=True,
emin_kev=None, emax_kev=None, infile_dir=None, outfile_dir=None, emin_kev=None, emax_kev=None, infile_dir=None, outfile_dir=None,
do_obsmode=False,do_center=False,do_evtool=False,do_expmap=False,attcorr=False): do_obsmode=False,do_center=False,do_evtool=False,do_expmap=False,attcorr=False):
@ -328,9 +330,10 @@ def init_events(key=None, eband_selected=[0], eband_index=None,
# run the command # run the command
if(do_evtool==True and do_init==True): if(do_evtool==True and do_init==True):
#log = subprocess.check_call(cmd) #log = subprocess.check_call(cmd)
print((" ").join(cmd)) #print((" ").join(cmd))
test_exe('evtool') #test_exe('evtool')
os.system((" ").join(cmd)) #os.system((" ").join(cmd))
runme(cmd, local_run=local_run, cwd=cwd)
""" correct OBS_MODE """ """ correct OBS_MODE """
with fits.open(outfile_evtool) as hdu: with fits.open(outfile_evtool) as hdu:
for h in hdu: for h in hdu:
@@ -465,23 +468,47 @@ def save_ds9reg(filename,scale=60):
         for rec in tbdata:
             writer.write("fk5;circle({}, {}, {})\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale))
-def save_ermldet_ds9reg(filename,scale=60,id_instr=1,id_band=1,ext_like=0.0,label='det_like',dl=10.0):
+def save_ermldet_ds9reg_id(filename,scale=60,id_instr=1,id_band=1,ext_like=0.0,dl=6.0,point=False):
     if(os.path.isfile(filename)==True):
-        #fout=filename.replace(".fits", ".instr{}.{}.reg".format(id_instr,label))
-        fout=filename.replace(".fits", ".{}.reg".format(label))
+        fout=filename.replace(".fits", ".id.reg")
         hdul = fits.open(filename)
         tbdata = hdul[1].data
         with open(fout, 'w') as writer:
             for rec in tbdata:
+                det_like=float(rec['det_like'])
+                id_src=int(rec['id_src'])
                 if not (rec['id_inst'] == id_instr and rec['id_band'] == id_band):
                     continue
-                if (rec['det_like'] < dl):
+                if (det_like < dl):
                     continue
                 #if (rec['ext_like'] > ext_like):
                 #    continue
-                if(abs(rec[label])>0.0):
-                    writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,rec[label]))
+                if(point==False):
+                    writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,id_src))
+                else:
+                    writer.write("fk5;point({}, {}) # point=cross text={{{}}}\n".format(rec['ra'],rec['dec'],id_src))
def save_ermldet_ds9reg_dl(filename,scale=60,id_instr=1,id_band=1,ext_like=0.0,dl=6,point=False):
if(os.path.isfile(filename)==True):
fout=filename.replace(".fits", ".dl{}.reg".format(dl))
hdul = fits.open(filename)
tbdata = hdul[1].data
with open(fout, 'w') as writer:
for rec in tbdata:
det_like=float(rec['det_like'])
if not (rec['id_inst'] == id_instr and rec['id_band'] == id_band):
continue
if (det_like < float(dl)):
continue
#if (rec['ext_like'] > ext_like):
# continue
if(abs(det_like)>0.0):
if(point==False):
writer.write("fk5;circle({}, {}, {}) # text={{{:.2f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,det_like))
else:
writer.write("fk5;point({}, {}) # point=cross text={{{:.2f}}}\n".format(rec['ra'],rec['dec'],det_like))
 def save_catprep_ds9reg(filename,scale=60,id_instr=1,id_band=1,ext_like=0.0,label='det_like_0'):
     if(os.path.isfile(filename)==True):
         #fout=filename.replace(".fits", ".instr{}.{}.reg".format(id_instr,label))
@@ -496,7 +523,7 @@ def save_catprep_ds9reg(filename,scale=60,id_instr=1,id_band=1,ext_like=0.0,labe
             writer.write("fk5;circle({}, {}, {}) # text={{{:.1f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,rec[label]))
-def crossmatch_shu2019(filename,refimage=None,crval=None,devmax=30,dlmin=6.0,dlmax=10000,ext_like=0.0,outkey='shu2019', catalog=None, errlim=10.0, nside=1024, order='nested', datakey=None):
+def crossmatch_shu2019(filename,refimage=None,crval=None,devmax=15,dlmin=6.0,dlmax=10000,ext_like=0.0,outkey='shu2019', catalog=None, errlim=10.0, nside=1024, order='nested', datakey=None):
     if(os.path.isfile(filename)==False):
         print("File not found {}".format(filename))
     print("Start cross-match with Gaia-unWISE")
@@ -543,6 +570,7 @@ def crossmatch_shu2019(filename,refimage=None,crval=None,devmax=30,dlmin=6.0,dlm
     srcs=[]
     for rec in tbdata_src:
+        # exclude extended objects
         if (rec['ext_like'] > ext_like):
             continue
@@ -935,8 +963,8 @@ def wcs_astro_corr(catalog=None, Nsim=100, Rsim=10.0):
     #log=catalog.replace(".fits", ".xfm.log")
-    #do_astro_corr_rota(src=src_list, ref=ref_list, catalog=catalog)
-    do_astro_corr(src=src_list, ref=ref_list, catalog=catalog, Nsim=Nsim, Rsim=Rsim)
+    #astro_corr_rota(src=src_list, ref=ref_list, catalog=catalog)
+    astro_corr(src=src_list, ref=ref_list, catalog=catalog, Nsim=Nsim, Rsim=Rsim)
 """
 def wcs_match_ciao(catalog=None, method='trans',radius=15,residlim=7,residfac=2.0,residtype=0):
@ -1441,15 +1469,39 @@ def create_expmap_merged(expmaps,outfile,scale=7.0):
if 'merged_map' in locals() and 'merged_hdr' in locals(): if 'merged_map' in locals() and 'merged_hdr' in locals():
fits.writeto(outfile, merged_map, header=merged_hdr, overwrite=True) fits.writeto(outfile, merged_map, header=merged_hdr, overwrite=True)
def add_expmaps(expmaps,outfile,scale=1.0):
"""
Adds exposure from TMs. Header is taken from first exposure file.
"""
tmlist={}
for expmap in expmaps:
hdul = fits.open(expmap)
emap = hdul[0].data
ehdr = hdul[0].header
if 'merged_map' in locals():
merged_map = np.add(merged_map, emap)
else:
merged_map = emap
merged_hdr = ehdr
merged_hdr['INSTRUME']='merged'
merged_hdr['NINST']=len(tmlist)
merged_hdr['OBS_MODE']=' '
fits.writeto(outfile, merged_map, header=merged_hdr, overwrite=True)
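add_expmaps() above co-adds the TM exposure maps pixel by pixel and reuses the first header; note that tmlist is never filled there, so the NINST keyword would be written as 0. A hedged sketch (not from the commit) of the same co-addition with an explicit accumulator and NINST taken from the input list:

    # Sketch only; assumes all exposure maps share the same WCS/pixel grid.
    import numpy as np
    from astropy.io import fits

    def sum_expmaps(expmaps, outfile):
        total, hdr = None, None
        for path in expmaps:
            with fits.open(path) as hdul:
                data = hdul[0].data.astype(np.float64)
                if total is None:
                    total, hdr = data, hdul[0].header.copy()
                else:
                    total += data
        hdr['INSTRUME'] = 'merged'
        hdr['NINST'] = len(expmaps)
        fits.writeto(outfile, total, header=hdr, overwrite=True)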
 def do_erbackmap_esass(image,expimage,boxlist,detmask,emin,emax,outfile_backmap,cheese_mask):
     test_exe('erbackmap')
     cmd=["erbackmap",
-         "image=%s" %(image),
-         "expimage=%s" %(expimage),
+         "image=\'{}\'".format(image),
+         "expimage=\'{}\'".format(expimage),
+         #"image=%s" %(image),
+         #"expimage=%s" %(expimage),
          "boxlist={}".format(boxlist),
          "detmask=%s" %(detmask),
-         "emin=%s" %(emin),
-         "emax=%s" %(emax),
+         "emin=\'{}\'".format(emin),
+         "emax=\'{}\'".format(emax),
          "bkgimage=%s" %(outfile_backmap),
          "cheesemask=%s" %(cheese_mask),
          "idband=1",
@@ -1465,13 +1517,14 @@ def do_erbackmap_esass(image,expimage,boxlist,detmask,emin,emax,outfile_backmap,
     os.system((" ").join(cmd))
     print((" ").join(cmd))
-def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0,outkey='main'):
+def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=5.0,outkey='main'):
     if(os.path.isfile(filename)==False):
         print("File not found {}".format(filename))
     print("Filter catprep {}".format(filename))
-    fout_selected=filename.replace(".fits", ".{}.selected.reg".format(outkey))
+    fout_selected=filename.replace(".fits", ".{}.reg".format(outkey))
     fout_skip=filename.replace(".fits", ".{}.skip.reg".format(outkey))
-    fout_extended=filename.replace(".fits", ".extended.reg")
+    fout_extended=filename.replace(".fits", ".ext.reg")
+    fout_extended_skip=filename.replace(".fits", ".ext_skip.reg")
     hdul = fits.open(filename)
     tbdata = hdul[1].data
@@ -1479,24 +1532,32 @@ def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=
     catsel=[]
     catskip=[]
     catext=[]
+    catext_skip=[]
     skip_count=0
     selected_count=0
     keepalive_count=0
     print("Print of extended sources:")
     for rec in tbdata:
-        if (rec['ext_like'] > ext_like):
+        if (rec['ext_like'] >= ext_like and rec['ext_bad'] == False):
             catext.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],
                            'det_like':rec['det_like_0'],'ext_like':rec['ext_like'],
                            'ext':rec['ext'],'ext_err':rec['ext_err'],
-                           'src_id':rec['id_src']})
+                           'src_id':rec['id_src'],'ext_bad':rec['ext_bad']})
+        if (rec['ext_like'] >= ext_like and rec['ext_bad'] == True):
+            catext_skip.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],
+                                'det_like':rec['det_like_0'],'ext_like':rec['ext_like'],
+                                'ext':rec['ext'],'ext_err':rec['ext_err'],
+                                'src_id':rec['id_src'],'ext_bad':rec['ext_bad']})
+            """
             print("{:.2f} {} {} & {:.4f} & {:.4f} & {:.2f} & {:.2f} & {:.2f} $\\pm$ {:.2f} \\\\".format(rec['ra'],rec['id_src'],
                   make_source_name(rec['ra'], rec['dec']), rec['ra'], rec['dec'],
                   rec['det_like_0'], rec['ext_like'],
                   rec['ext'], rec['ext_err']))
+            """
         if ((rec['det_like_0'] > dlmin and rec['det_like_0'] < dlmax)):
             catsel.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],'det_like':rec['det_like_0'],
-                           'src_id':rec['id_src']})
+                           'src_id':rec['id_src'],'owner':'RU' if rec['owner']==0 else 'DE'})
selected_count=selected_count + 1 selected_count=selected_count + 1
else: else:
catskip.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],'det_like':rec['det_like_0'], catskip.append({'ra':rec['ra'],'dec':rec['dec'],'radec_err':rec['radec_err'],'det_like':rec['det_like_0'],
@ -1507,12 +1568,13 @@ def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=
with open(fout_selected, 'w') as writer: with open(fout_selected, 'w') as writer:
for rec in catsel: for rec in catsel:
radius = rec['radec_err']/scale if not (np.isnan(rec['radec_err'])) else 15/scale radius = rec['radec_err']/scale if not (np.isnan(rec['radec_err'])) else 15/scale
writer.write("fk5;circle({}, {}, {}) # color=white text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],radius, writer.write("fk5;circle({}, {}, {}) # color=white text={{{} {:.1f}}}\n".format(rec['ra'],rec['dec'],radius,
rec['src_id'],rec['det_like'])) rec['src_id'],rec['det_like']))
#writer.write("fk5;circle({}, {}, {}) # color=white text={{{}}}\n".format(rec['ra'],rec['dec'],radius,rec['owner'],))
with open(fout_skip, 'w') as writer: with open(fout_skip, 'w') as writer:
for rec in catskip: for rec in catskip:
writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {:.2f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale, writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {:.1f}}}\n".format(rec['ra'],rec['dec'],rec['radec_err']/scale,
rec['src_id'],rec['det_like'])) rec['src_id'],rec['det_like']))
with open(fout_extended, 'w') as writer: with open(fout_extended, 'w') as writer:
@ -1523,6 +1585,14 @@ def filter_catprep(filename, expcut=100,dlmin=6.0,dlmax=10,scale=60*60,ext_like=
rec['src_id'], rec['src_id'],
rec['det_like'], rec['det_like'],
rec['ext_like'])) rec['ext_like']))
with open(fout_extended_skip, 'w') as writer:
for rec in catext_skip:
writer.write("fk5;circle({}, {}, {}) # color=white text={{{} dl:{:.1f} el:{:.1f}}}\n".format(rec['ra'],
rec['dec'],
rec['ext']/scale,
rec['src_id'],
rec['det_like'],
rec['ext_like']))
def filter_mllist(filename, expcut=100,dlcut=6.0,dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0): def filter_mllist(filename, expcut=100,dlcut=6.0,dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0):
if(os.path.isfile(filename)==False): if(os.path.isfile(filename)==False):
print("File not found {}".format(filename)) print("File not found {}".format(filename))
@ -1670,7 +1740,7 @@ def check_ermldet_forced(infile):
hdul.close() hdul.close()
for rec in tbdata: for rec in tbdata:
idsrc=rec['ID_SRC'] idsrc = rec['ID_SRC']
boxid = rec['BOX_ID_SRC'] boxid = rec['BOX_ID_SRC']
if(idsrc != boxid): if(idsrc != boxid):
print("The ermldet catalog in forced mode should contain only unique sources.") print("The ermldet catalog in forced mode should contain only unique sources.")
@ -1801,18 +1871,18 @@ def read_forced_catalog(infile,sensmap=None):
return cat return cat
def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0,outkey='main', def make_coma_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,ext_like=0.0,outkey='main',
emin=None, emax=None, eband=None, #ecf=1.0, emin=None, emax=None, eband=None, #ecf=1.0,
infile_en00cat=None, infile_en00cat=None,
infile_en01cat=None, infile_en01cat=None,
infile_en02cat=None, infile_en02cat=None,
infile_en03cat=None, infile_en03cat=None,
infile_en06cat=None, infile_en04cat=None,
infile_en00sens=None, infile_en00sens=None,
infile_en01sens=None, infile_en01sens=None,
infile_en02sens=None, infile_en02sens=None,
infile_en03sens=None, infile_en03sens=None,
infile_en06sens=None, infile_en04sens=None,
srcs_forced=None): srcs_forced=None):
if(os.path.isfile(infile)==False): if(os.path.isfile(infile)==False):
print("File not found {}".format(infile)) print("File not found {}".format(infile))
@ -1831,7 +1901,8 @@ def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,e
en01cat=read_forced_catalog(infile_en01cat,sensmap=infile_en01sens) en01cat=read_forced_catalog(infile_en01cat,sensmap=infile_en01sens)
en02cat=read_forced_catalog(infile_en02cat,sensmap=infile_en02sens) en02cat=read_forced_catalog(infile_en02cat,sensmap=infile_en02sens)
en03cat=read_forced_catalog(infile_en03cat,sensmap=infile_en03sens) en03cat=read_forced_catalog(infile_en03cat,sensmap=infile_en03sens)
en06cat=read_forced_catalog(infile_en06cat,sensmap=infile_en06sens) en04cat=read_forced_catalog(infile_en04cat,sensmap=infile_en04sens)
hdul = fits.open(infile) hdul = fits.open(infile)
tbdata = hdul[1].data tbdata = hdul[1].data
@ -1926,97 +1997,97 @@ def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,e
'en01_dl':en01cat[key]['det_like'] if(key in en01cat) else None, 'en01_dl':en01cat[key]['det_like'] if(key in en01cat) else None,
'en02_dl':en02cat[key]['det_like'] if(key in en02cat) else None, 'en02_dl':en02cat[key]['det_like'] if(key in en02cat) else None,
'en03_dl':en03cat[key]['det_like'] if(key in en03cat) else None, 'en03_dl':en03cat[key]['det_like'] if(key in en03cat) else None,
'en06_dl':en06cat[key]['det_like'] if(key in en06cat) else None, 'en04_dl':en04cat[key]['det_like'] if(key in en04cat) else None,
'en01_ml_rate':en01cat[key]['ml_rate'] if(key in en01cat) else None, 'en01_ml_rate':en01cat[key]['ml_rate'] if(key in en01cat) else None,
'en02_ml_rate':en02cat[key]['ml_rate'] if(key in en02cat) else None, 'en02_ml_rate':en02cat[key]['ml_rate'] if(key in en02cat) else None,
'en03_ml_rate':en03cat[key]['ml_rate'] if(key in en03cat) else None, 'en03_ml_rate':en03cat[key]['ml_rate'] if(key in en03cat) else None,
'en06_ml_rate':en06cat[key]['ml_rate'] if(key in en06cat) else None, 'en04_ml_rate':en04cat[key]['ml_rate'] if(key in en04cat) else None,
'en01_ml_rate_err':en01cat[key]['ml_rate_err'] if(key in en01cat) else None, 'en01_ml_rate_err':en01cat[key]['ml_rate_err'] if(key in en01cat) else None,
'en02_ml_rate_err':en02cat[key]['ml_rate_err'] if(key in en02cat) else None, 'en02_ml_rate_err':en02cat[key]['ml_rate_err'] if(key in en02cat) else None,
'en03_ml_rate_err':en03cat[key]['ml_rate_err'] if(key in en03cat) else None, 'en03_ml_rate_err':en03cat[key]['ml_rate_err'] if(key in en03cat) else None,
'en06_ml_rate_err':en06cat[key]['ml_rate_err'] if(key in en06cat) else None, 'en04_ml_rate_err':en04cat[key]['ml_rate_err'] if(key in en04cat) else None,
'en01_ml_rate_lowerr':en01cat[key]['ml_rate_lowerr'] if(key in en01cat) else None, 'en01_ml_rate_lowerr':en01cat[key]['ml_rate_lowerr'] if(key in en01cat) else None,
'en02_ml_rate_lowerr':en02cat[key]['ml_rate_lowerr'] if(key in en02cat) else None, 'en02_ml_rate_lowerr':en02cat[key]['ml_rate_lowerr'] if(key in en02cat) else None,
'en03_ml_rate_lowerr':en03cat[key]['ml_rate_lowerr'] if(key in en03cat) else None, 'en03_ml_rate_lowerr':en03cat[key]['ml_rate_lowerr'] if(key in en03cat) else None,
'en06_ml_rate_lowerr':en06cat[key]['ml_rate_lowerr'] if(key in en06cat) else None, 'en04_ml_rate_lowerr':en04cat[key]['ml_rate_lowerr'] if(key in en04cat) else None,
'en01_ml_rate_uperr':en01cat[key]['ml_rate_uperr'] if(key in en01cat) else None, 'en01_ml_rate_uperr':en01cat[key]['ml_rate_uperr'] if(key in en01cat) else None,
'en02_ml_rate_uperr':en02cat[key]['ml_rate_uperr'] if(key in en02cat) else None, 'en02_ml_rate_uperr':en02cat[key]['ml_rate_uperr'] if(key in en02cat) else None,
'en03_ml_rate_uperr':en03cat[key]['ml_rate_uperr'] if(key in en03cat) else None, 'en03_ml_rate_uperr':en03cat[key]['ml_rate_uperr'] if(key in en03cat) else None,
'en06_ml_rate_uperr':en06cat[key]['ml_rate_uperr'] if(key in en06cat) else None, 'en04_ml_rate_uperr':en04cat[key]['ml_rate_uperr'] if(key in en04cat) else None,
'en01_ml_exp':en01cat[key]['ml_exp'] if(key in en01cat) else None, 'en01_ml_exp':en01cat[key]['ml_exp'] if(key in en01cat) else None,
'en02_ml_exp':en02cat[key]['ml_exp'] if(key in en02cat) else None, 'en02_ml_exp':en02cat[key]['ml_exp'] if(key in en02cat) else None,
'en03_ml_exp':en03cat[key]['ml_exp'] if(key in en03cat) else None, 'en03_ml_exp':en03cat[key]['ml_exp'] if(key in en03cat) else None,
'en06_ml_exp':en06cat[key]['ml_exp'] if(key in en06cat) else None, 'en04_ml_exp':en04cat[key]['ml_exp'] if(key in en04cat) else None,
'en01_ml_bkg':en01cat[key]['ml_bkg'] if(key in en01cat) else None, 'en01_ml_bkg':en01cat[key]['ml_bkg'] if(key in en01cat) else None,
'en02_ml_bkg':en02cat[key]['ml_bkg'] if(key in en02cat) else None, 'en02_ml_bkg':en02cat[key]['ml_bkg'] if(key in en02cat) else None,
'en03_ml_bkg':en03cat[key]['ml_bkg'] if(key in en03cat) else None, 'en03_ml_bkg':en03cat[key]['ml_bkg'] if(key in en03cat) else None,
'en06_ml_bkg':en06cat[key]['ml_bkg'] if(key in en06cat) else None, 'en04_ml_bkg':en04cat[key]['ml_bkg'] if(key in en04cat) else None,
'en01_cts':en01cat[key]['ml_cts'] if(key in en01cat) else None, 'en01_cts':en01cat[key]['ml_cts'] if(key in en01cat) else None,
'en02_cts':en02cat[key]['ml_cts'] if(key in en02cat) else None, 'en02_cts':en02cat[key]['ml_cts'] if(key in en02cat) else None,
'en03_cts':en03cat[key]['ml_cts'] if(key in en03cat) else None, 'en03_cts':en03cat[key]['ml_cts'] if(key in en03cat) else None,
'en06_cts':en06cat[key]['ml_cts'] if(key in en06cat) else None, 'en04_cts':en04cat[key]['ml_cts'] if(key in en04cat) else None,
'en01_cts_err':en01cat[key]['ml_cts_err'] if(key in en01cat) else None, 'en01_cts_err':en01cat[key]['ml_cts_err'] if(key in en01cat) else None,
'en02_cts_err':en02cat[key]['ml_cts_err'] if(key in en02cat) else None, 'en02_cts_err':en02cat[key]['ml_cts_err'] if(key in en02cat) else None,
'en03_cts_err':en03cat[key]['ml_cts_err'] if(key in en03cat) else None, 'en03_cts_err':en03cat[key]['ml_cts_err'] if(key in en03cat) else None,
'en06_cts_err':en06cat[key]['ml_cts_err'] if(key in en06cat) else None, 'en04_cts_err':en04cat[key]['ml_cts_err'] if(key in en04cat) else None,
'en01_flux':en01cat[key]['ml_flux'] if(key in en01cat) else None, 'en01_flux':en01cat[key]['ml_flux'] if(key in en01cat) else None,
'en02_flux':en02cat[key]['ml_flux'] if(key in en02cat) else None, 'en02_flux':en02cat[key]['ml_flux'] if(key in en02cat) else None,
'en03_flux':en03cat[key]['ml_flux'] if(key in en03cat) else None, 'en03_flux':en03cat[key]['ml_flux'] if(key in en03cat) else None,
'en06_flux':en06cat[key]['ml_flux'] if(key in en06cat) else None, 'en04_flux':en04cat[key]['ml_flux'] if(key in en04cat) else None,
'en01_flux_err':en01cat[key]['ml_flux_err'] if(key in en01cat) else None, 'en01_flux_err':en01cat[key]['ml_flux_err'] if(key in en01cat) else None,
'en02_flux_err':en02cat[key]['ml_flux_err'] if(key in en02cat) else None, 'en02_flux_err':en02cat[key]['ml_flux_err'] if(key in en02cat) else None,
'en03_flux_err':en03cat[key]['ml_flux_err'] if(key in en03cat) else None, 'en03_flux_err':en03cat[key]['ml_flux_err'] if(key in en03cat) else None,
'en06_flux_err':en06cat[key]['ml_flux_err'] if(key in en06cat) else None, 'en04_flux_err':en04cat[key]['ml_flux_err'] if(key in en04cat) else None,
'en01_flux_lowerr':en01cat[key]['ml_flux_lowerr'] if(key in en01cat) else None, 'en01_flux_lowerr':en01cat[key]['ml_flux_lowerr'] if(key in en01cat) else None,
'en02_flux_lowerr':en02cat[key]['ml_flux_lowerr'] if(key in en02cat) else None, 'en02_flux_lowerr':en02cat[key]['ml_flux_lowerr'] if(key in en02cat) else None,
'en03_flux_lowerr':en03cat[key]['ml_flux_lowerr'] if(key in en03cat) else None, 'en03_flux_lowerr':en03cat[key]['ml_flux_lowerr'] if(key in en03cat) else None,
'en06_flux_lowerr':en06cat[key]['ml_flux_lowerr'] if(key in en06cat) else None, 'en04_flux_lowerr':en04cat[key]['ml_flux_lowerr'] if(key in en04cat) else None,
'en01_flux_uperr':en01cat[key]['ml_flux_uperr'] if(key in en01cat) else None, 'en01_flux_uperr':en01cat[key]['ml_flux_uperr'] if(key in en01cat) else None,
'en02_flux_uperr':en02cat[key]['ml_flux_uperr'] if(key in en02cat) else None, 'en02_flux_uperr':en02cat[key]['ml_flux_uperr'] if(key in en02cat) else None,
'en03_flux_uperr':en03cat[key]['ml_flux_uperr'] if(key in en03cat) else None, 'en03_flux_uperr':en03cat[key]['ml_flux_uperr'] if(key in en03cat) else None,
'en06_flux_uperr':en06cat[key]['ml_flux_uperr'] if(key in en06cat) else None, 'en04_flux_uperr':en04cat[key]['ml_flux_uperr'] if(key in en04cat) else None,
'en01_sens':en01cat[key]['sens'] if(key in en01cat) else None, 'en01_sens':en01cat[key]['sens'] if(key in en01cat) else None,
'en02_sens':en02cat[key]['sens'] if(key in en02cat) else None, 'en02_sens':en02cat[key]['sens'] if(key in en02cat) else None,
'en03_sens':en03cat[key]['sens'] if(key in en03cat) else None, 'en03_sens':en03cat[key]['sens'] if(key in en03cat) else None,
'en06_sens':en06cat[key]['sens'] if(key in en06cat) else None, 'en04_sens':en04cat[key]['sens'] if(key in en04cat) else None,
'en01_ape_cts':en01cat[key]['ape_cts'] if(key in en01cat) else None, 'en01_ape_cts':en01cat[key]['ape_cts'] if(key in en01cat) else None,
'en02_ape_cts':en02cat[key]['ape_cts'] if(key in en02cat) else None, 'en02_ape_cts':en02cat[key]['ape_cts'] if(key in en02cat) else None,
'en03_ape_cts':en03cat[key]['ape_cts'] if(key in en03cat) else None, 'en03_ape_cts':en03cat[key]['ape_cts'] if(key in en03cat) else None,
'en06_ape_cts':en06cat[key]['ape_cts'] if(key in en06cat) else None, 'en04_ape_cts':en04cat[key]['ape_cts'] if(key in en04cat) else None,
'en01_ape_exp':en01cat[key]['ape_exp'] if(key in en01cat) else None, 'en01_ape_exp':en01cat[key]['ape_exp'] if(key in en01cat) else None,
'en02_ape_exp':en02cat[key]['ape_exp'] if(key in en02cat) else None, 'en02_ape_exp':en02cat[key]['ape_exp'] if(key in en02cat) else None,
'en03_ape_exp':en03cat[key]['ape_exp'] if(key in en03cat) else None, 'en03_ape_exp':en03cat[key]['ape_exp'] if(key in en03cat) else None,
'en06_ape_exp':en06cat[key]['ape_exp'] if(key in en06cat) else None, 'en04_ape_exp':en04cat[key]['ape_exp'] if(key in en04cat) else None,
'en01_ape_bkg':en01cat[key]['ape_bkg'] if(key in en01cat) else None, 'en01_ape_bkg':en01cat[key]['ape_bkg'] if(key in en01cat) else None,
'en02_ape_bkg':en02cat[key]['ape_bkg'] if(key in en02cat) else None, 'en02_ape_bkg':en02cat[key]['ape_bkg'] if(key in en02cat) else None,
'en03_ape_bkg':en03cat[key]['ape_bkg'] if(key in en03cat) else None, 'en03_ape_bkg':en03cat[key]['ape_bkg'] if(key in en03cat) else None,
'en06_ape_bkg':en06cat[key]['ape_bkg'] if(key in en06cat) else None, 'en04_ape_bkg':en04cat[key]['ape_bkg'] if(key in en04cat) else None,
'en01_ape_radius':en01cat[key]['ape_radius'] if(key in en01cat) else None, 'en01_ape_radius':en01cat[key]['ape_radius'] if(key in en01cat) else None,
'en02_ape_radius':en02cat[key]['ape_radius'] if(key in en02cat) else None, 'en02_ape_radius':en02cat[key]['ape_radius'] if(key in en02cat) else None,
'en03_ape_radius':en03cat[key]['ape_radius'] if(key in en03cat) else None, 'en03_ape_radius':en03cat[key]['ape_radius'] if(key in en03cat) else None,
'en06_ape_radius':en06cat[key]['ape_radius'] if(key in en06cat) else None, 'en04_ape_radius':en04cat[key]['ape_radius'] if(key in en04cat) else None,
'en01_ape_pois':en01cat[key]['ape_pois'] if(key in en01cat) else None, 'en01_ape_pois':en01cat[key]['ape_pois'] if(key in en01cat) else None,
'en02_ape_pois':en02cat[key]['ape_pois'] if(key in en02cat) else None, 'en02_ape_pois':en02cat[key]['ape_pois'] if(key in en02cat) else None,
'en03_ape_pois':en03cat[key]['ape_pois'] if(key in en03cat) else None, 'en03_ape_pois':en03cat[key]['ape_pois'] if(key in en03cat) else None,
'en06_ape_pois':en06cat[key]['ape_pois'] if(key in en06cat) else None, 'en04_ape_pois':en04cat[key]['ape_pois'] if(key in en04cat) else None,
'forced_name':forced_name 'forced_name':forced_name
}, },
) )
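The long block above repeats, for every energy band, the same guarded lookup cat[key][field] if key in cat else None (with this commit renaming every en06 key to en04). Purely as an illustration of the pattern, and not as the project's code, the same entries could be generated with a pair of loops; note the hand-written dict uses a few shortened names (e.g. en01_dl, en01_cts), so a small rename map would be needed in practice:

    # Hypothetical refactor sketch, not part of the commit.
    bands = {'en01': en01cat, 'en02': en02cat, 'en03': en03cat, 'en04': en04cat}
    fields = ['det_like', 'ml_rate', 'ml_rate_err', 'ml_rate_lowerr', 'ml_rate_uperr',
              'ml_exp', 'ml_bkg', 'ml_cts', 'ml_cts_err', 'ml_flux', 'ml_flux_err',
              'ml_flux_lowerr', 'ml_flux_uperr', 'sens', 'ape_cts', 'ape_exp',
              'ape_bkg', 'ape_radius', 'ape_pois']
    entry = {}
    for band, cat in bands.items():
        for field in fields:
            entry["{}_{}".format(band, field)] = cat[key][field] if (key in cat) else None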
@ -2044,8 +2115,8 @@ def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,e
'en03_dl','en03_ml_rate','en03_ml_rate_err','en03_ml_rate_lowerr','en03_ml_rate_uperr','en03_cts','en03_cts_err','en03_ml_exp','en03_ml_bkg', 'en03_dl','en03_ml_rate','en03_ml_rate_err','en03_ml_rate_lowerr','en03_ml_rate_uperr','en03_cts','en03_cts_err','en03_ml_exp','en03_ml_bkg',
'en03_flux','en03_flux_err','en03_flux_lowerr','en03_flux_uperr','en03_sens','en03_ape_cts','en03_ape_exp','en03_ape_bkg','en03_ape_radius','en03_ape_pois', 'en03_flux','en03_flux_err','en03_flux_lowerr','en03_flux_uperr','en03_sens','en03_ape_cts','en03_ape_exp','en03_ape_bkg','en03_ape_radius','en03_ape_pois',
'en06_dl','en06_ml_rate','en06_ml_rate_err','en06_ml_rate_lowerr','en06_ml_rate_uperr','en06_cts','en06_cts_err','en06_ml_exp','en06_ml_bkg', 'en04_dl','en04_ml_rate','en04_ml_rate_err','en04_ml_rate_lowerr','en04_ml_rate_uperr','en04_cts','en04_cts_err','en04_ml_exp','en04_ml_bkg',
'en06_flux','en06_flux_err','en06_flux_lowerr','en06_flux_uperr','en06_sens','en06_ape_cts','en06_ape_exp','en06_ape_bkg','en06_ape_radius','en06_ape_pois',] 'en04_flux','en04_flux_err','en04_flux_lowerr','en04_flux_uperr','en04_sens','en04_ape_cts','en04_ape_exp','en04_ape_bkg','en04_ape_radius','en04_ape_pois',]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames) writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader() writer.writeheader()
for rec in catsel: for rec in catsel:
@ -2054,7 +2125,7 @@ def make_euds_catalog(infile=None, rawcat=None, dlmin=6.0,dlmax=10,scale=60*60,e
with open(rawcat, 'wb') as f: with open(rawcat, 'wb') as f:
pickle.dump(catsel, f) pickle.dump(catsel, f)
def final_euds_catalog(infile=None,outfile_fits=None,expcut=100): def final_coma_catalog(infile=None,outfile_fits=None,expcut=100):
with open(infile, 'rb') as f: with open(infile, 'rb') as f:
table = pickle.load(f) table = pickle.load(f)
@ -2207,19 +2278,19 @@ def final_euds_catalog(infile=None,outfile_fits=None,expcut=100):
det_like_1.append(rec['en01_dl']) det_like_1.append(rec['en01_dl'])
det_like_2.append(rec['en02_dl']) det_like_2.append(rec['en02_dl'])
det_like_3.append(rec['en03_dl']) det_like_3.append(rec['en03_dl'])
det_like_4.append(rec['en06_dl']) det_like_4.append(rec['en04_dl'])
ml_rate_0.append(rec['ml_rate']) ml_rate_0.append(rec['ml_rate'])
ml_rate_1.append(rec['en01_ml_rate']) ml_rate_1.append(rec['en01_ml_rate'])
ml_rate_2.append(rec['en02_ml_rate']) ml_rate_2.append(rec['en02_ml_rate'])
ml_rate_3.append(rec['en03_ml_rate']) ml_rate_3.append(rec['en03_ml_rate'])
ml_rate_4.append(rec['en06_ml_rate']) ml_rate_4.append(rec['en04_ml_rate'])
ml_rate_err_0.append(rec['ml_rate_err']) ml_rate_err_0.append(rec['ml_rate_err'])
ml_rate_err_1.append(rec['en01_ml_rate_err']) ml_rate_err_1.append(rec['en01_ml_rate_err'])
ml_rate_err_2.append(rec['en02_ml_rate_err']) ml_rate_err_2.append(rec['en02_ml_rate_err'])
ml_rate_err_3.append(rec['en03_ml_rate_err']) ml_rate_err_3.append(rec['en03_ml_rate_err'])
ml_rate_err_4.append(rec['en06_ml_rate_err']) ml_rate_err_4.append(rec['en04_ml_rate_err'])
#ml_rate_lowerr_0.append(rec['ml_rate_lowerr']) #ml_rate_lowerr_0.append(rec['ml_rate_lowerr'])
#ml_rate_lowerr_1.append(rec['en01_ml_rate_lowerr']) #ml_rate_lowerr_1.append(rec['en01_ml_rate_lowerr'])
@ -2238,69 +2309,69 @@ def final_euds_catalog(infile=None,outfile_fits=None,expcut=100):
ml_cts_1.append(rec['en01_cts']) ml_cts_1.append(rec['en01_cts'])
ml_cts_2.append(rec['en02_cts']) ml_cts_2.append(rec['en02_cts'])
ml_cts_3.append(rec['en03_cts']) ml_cts_3.append(rec['en03_cts'])
ml_cts_4.append(rec['en06_cts']) ml_cts_4.append(rec['en04_cts'])
ml_cts_err_0.append(rec['ml_cts_err']) ml_cts_err_0.append(rec['ml_cts_err'])
ml_cts_err_1.append(rec['en01_cts_err']) ml_cts_err_1.append(rec['en01_cts_err'])
ml_cts_err_2.append(rec['en02_cts_err']) ml_cts_err_2.append(rec['en02_cts_err'])
ml_cts_err_3.append(rec['en03_cts_err']) ml_cts_err_3.append(rec['en03_cts_err'])
ml_cts_err_4.append(rec['en06_cts_err']) ml_cts_err_4.append(rec['en04_cts_err'])
ml_flux_0.append(rec['ml_flux']) ml_flux_0.append(rec['ml_flux'])
ml_flux_1.append(rec['en01_flux']) ml_flux_1.append(rec['en01_flux'])
ml_flux_2.append(rec['en02_flux']) ml_flux_2.append(rec['en02_flux'])
ml_flux_3.append(rec['en03_flux']) ml_flux_3.append(rec['en03_flux'])
ml_flux_4.append(rec['en06_flux']) ml_flux_4.append(rec['en04_flux'])
ml_flux_err_0.append(rec['ml_flux_err']) ml_flux_err_0.append(rec['ml_flux_err'])
ml_flux_err_1.append(rec['en01_flux_err']) ml_flux_err_1.append(rec['en01_flux_err'])
ml_flux_err_2.append(rec['en02_flux_err']) ml_flux_err_2.append(rec['en02_flux_err'])
ml_flux_err_3.append(rec['en03_flux_err']) ml_flux_err_3.append(rec['en03_flux_err'])
ml_flux_err_4.append(rec['en06_flux_err']) ml_flux_err_4.append(rec['en04_flux_err'])
ml_exp_0.append(rec['ml_exp']) ml_exp_0.append(rec['ml_exp'])
ml_exp_1.append(rec['en01_ml_exp']) ml_exp_1.append(rec['en01_ml_exp'])
ml_exp_2.append(rec['en02_ml_exp']) ml_exp_2.append(rec['en02_ml_exp'])
ml_exp_3.append(rec['en03_ml_exp']) ml_exp_3.append(rec['en03_ml_exp'])
ml_exp_4.append(rec['en06_ml_exp']) ml_exp_4.append(rec['en04_ml_exp'])
ml_bkg_0.append(rec['ml_bkg']) ml_bkg_0.append(rec['ml_bkg'])
ml_bkg_1.append(rec['en01_ml_bkg']) ml_bkg_1.append(rec['en01_ml_bkg'])
ml_bkg_2.append(rec['en02_ml_bkg']) ml_bkg_2.append(rec['en02_ml_bkg'])
ml_bkg_3.append(rec['en03_ml_bkg']) ml_bkg_3.append(rec['en03_ml_bkg'])
ml_bkg_4.append(rec['en06_ml_bkg']) ml_bkg_4.append(rec['en04_ml_bkg'])
ape_cts_0.append(rec['ape_cts']) ape_cts_0.append(rec['ape_cts'])
ape_cts_1.append(rec['en01_ape_cts']) ape_cts_1.append(rec['en01_ape_cts'])
ape_cts_2.append(rec['en02_ape_cts']) ape_cts_2.append(rec['en02_ape_cts'])
ape_cts_3.append(rec['en03_ape_cts']) ape_cts_3.append(rec['en03_ape_cts'])
ape_cts_4.append(rec['en06_ape_cts']) ape_cts_4.append(rec['en04_ape_cts'])
ape_exp_0.append(rec['ape_exp']) ape_exp_0.append(rec['ape_exp'])
ape_exp_1.append(rec['en01_ape_exp']) ape_exp_1.append(rec['en01_ape_exp'])
ape_exp_2.append(rec['en02_ape_exp']) ape_exp_2.append(rec['en02_ape_exp'])
ape_exp_3.append(rec['en03_ape_exp']) ape_exp_3.append(rec['en03_ape_exp'])
ape_exp_4.append(rec['en06_ape_exp']) ape_exp_4.append(rec['en04_ape_exp'])
ape_bkg_0.append(rec['ape_bkg']) ape_bkg_0.append(rec['ape_bkg'])
ape_bkg_1.append(rec['en01_ape_bkg']) ape_bkg_1.append(rec['en01_ape_bkg'])
ape_bkg_2.append(rec['en02_ape_bkg']) ape_bkg_2.append(rec['en02_ape_bkg'])
ape_bkg_3.append(rec['en03_ape_bkg']) ape_bkg_3.append(rec['en03_ape_bkg'])
ape_bkg_4.append(rec['en06_ape_bkg']) ape_bkg_4.append(rec['en04_ape_bkg'])
ape_rad_0.append(rec['ape_radius']) ape_rad_0.append(rec['ape_radius'])
ape_rad_1.append(rec['en01_ape_radius']) ape_rad_1.append(rec['en01_ape_radius'])
ape_rad_2.append(rec['en02_ape_radius']) ape_rad_2.append(rec['en02_ape_radius'])
ape_rad_3.append(rec['en03_ape_radius']) ape_rad_3.append(rec['en03_ape_radius'])
ape_rad_4.append(rec['en06_ape_radius']) ape_rad_4.append(rec['en04_ape_radius'])
ape_pois_0.append(rec['ape_pois'] if(rec['ape_pois']>0.0) else None) ape_pois_0.append(rec['ape_pois'] if(rec['ape_pois']>0.0) else None)
ape_pois_1.append(rec['en01_ape_pois'] if(rec['en01_ape_pois']>0.0) else None) ape_pois_1.append(rec['en01_ape_pois'] if(rec['en01_ape_pois']>0.0) else None)
ape_pois_2.append(rec['en02_ape_pois'] if(rec['en02_ape_pois']>0.0) else None) ape_pois_2.append(rec['en02_ape_pois'] if(rec['en02_ape_pois']>0.0) else None)
ape_pois_3.append(rec['en03_ape_pois'] if(rec['en03_ape_pois']>0.0) else None) ape_pois_3.append(rec['en03_ape_pois'] if(rec['en03_ape_pois']>0.0) else None)
ape_pois_4.append(rec['en06_ape_pois'] if(rec['en06_ape_pois']>0.0) else None) ape_pois_4.append(rec['en04_ape_pois'] if(rec['en04_ape_pois']>0.0) else None)
print("Ready to write {} rows".format(count)) print("Ready to write {} rows".format(count))
@ -2522,22 +2593,32 @@ def add_specific_columns(infile):
ermldet/FGET_MLLIST: **ERROR2** column DEC_LOWERR not found ermldet/FGET_MLLIST: **ERROR2** column DEC_LOWERR not found
ermldet/FGET_MLLIST: **ERROR2** column DEC_UPERR not found ermldet/FGET_MLLIST: **ERROR2** column DEC_UPERR not found
""" """
-def make_final_ds9reg(infile=None, outreg=None, scale=3600):
+def make_final_ds9reg(infile=None, outreg=None, scale=3600, noname=False):
with open(infile, 'rb') as f: with open(infile, 'rb') as f:
data = pickle.load(f) data = pickle.load(f)
print() print()
with open(outreg, 'w') as writer: with open(outreg, 'w') as writer:
for rec in data: for rec in data:
title=''
if(np.isnan(float(rec['radec_err']))):
radec_err=0.001
title+='nan'
else:
radec_err=float(rec['radec_err'])
if(rec['forced_name'] != None): if(rec['forced_name'] != None):
notes=rec['forced_name'] notes=rec['forced_name']
else: else:
notes='' notes=''
-            writer.write("fk5;circle({}, {}, {}) # color=red text={{{} {}}}\n".format(rec['ra'], rec['dec'],
-                         rec['radec_err']/scale,
-                         rec['name'],notes))
+            if(noname==False):
+                title+=" {} {}".format(rec['name'],rec['src_id'])
+                writer.write("fk5;circle({}, {}, {}) # color=red text={{{}}}\n".format(rec['ra'], rec['dec'],
+                             radec_err/scale,title))
+            else:
+                title+=" {}".format(rec['src_id'])
+                writer.write("fk5;point({}, {}) # color=red point=cross text={{{}}}\n".format(rec['ra'], rec['dec'],title))
def make_extended(infile=None, elmin=5.0, outreg=None, scale=3600): def make_extended(infile=None, elmin=5.0, outreg=None, scale=3600):
@ -3878,3 +3959,99 @@ def make_euds_cosmatch(infile=None,outfile=None):
t.write(outfile, format='fits', overwrite=True) t.write(outfile, format='fits', overwrite=True)
def make_ext_bad(infile=None, ext_like_cut=5.0, col_name='EXT_BAD', bkg_map=None, bkg_cut=1.05, exception=None, bright_dist=160, bright_cut=1E-12):
print("Reading {}".format(infile))
t = Table.read(infile)
cols = t.colnames
#print(cols)
ext_col=t['EXT']
ext_bad=np.array([False]*len(ext_col), dtype=bool)
col_bad = Column(name=col_name, description="EXT BAD", data=ext_bad)
# Part 1: extent parameters that are not defined (masked)
index = (t.mask['EXT_ERR'] | t.mask['EXT_LOWERR'] | t.mask['EXT_UPERR'])
ext_bad[index]=True
ext_like_count=0
ext_bad_count=0
# print statistics
for idx in range(len(t['EXT'])):
ext_like=float(t['EXT_LIKE'][idx])
if(ext_like >= ext_like_cut):
ext_like_count+=1
if(ext_bad[idx]==True):
ext_bad_count+=1
print("EXT_LIKE > {}: {} BAD: {}".format(ext_like_cut,ext_like_count,ext_bad_count))
# Part 2, Coma region
ext_bkg_count=0
if(bkg_map):
print("reading Background Map {}".format(bkg_map))
hdul = fits.open(bkg_map)
bkg = hdul[0].data
hdr = hdul[0].header
hdul.close()
wcs = WCS(hdr)
for idx in range(len(t['EXT'])):
ext_like=t['EXT_LIKE'][idx]
if(ext_like < ext_like_cut or ext_bad[idx]==True):
continue
if(t['ID_SRC'][idx] in exception):
continue
ra=t['RA'][idx]
dec=t['DEC'][idx]
crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
pix = wcs.wcs_world2pix([(ra, dec),], 1)
px=round(pix[0][0])-1
py=round(pix[0][1])-1
# mark all extended sources in Coma as BAD:
if(bkg[py,px] > bkg_cut):
ext_bad[idx]=True
ext_bkg_count+=1
print("Coma region BAD: {}".format(ext_bkg_count))
# Part 3, around bright sources
ext_bright_count=0
bright_ra=[]
bright_dec=[]
for idx in range(len(t['EXT'])):
flux=t['ML_FLUX_0'][idx]
if(flux>bright_cut):
print("Bright {}: {}".format(t['ID_SRC'][idx],flux))
bright_ra.append(float(t['RA'][idx]))
bright_dec.append(float(t['DEC'][idx]))
for idx in range(len(t['EXT'])):
ext_like=float(t['EXT_LIKE'][idx])
if(ext_like<ext_like_cut or ext_bad[idx]==True):
continue
ra=t['RA'][idx]
dec=t['DEC'][idx]
ext_crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
for ii in range(len(bright_ra)):
crd = SkyCoord(bright_ra[ii], bright_dec[ii], frame=FK5(), unit="deg")
sep = crd.separation(ext_crd).arcsec
if(sep<bright_dist):
ext_bad[idx]=True
print("Bright exclude: id={}".format(t['ID_SRC'][idx]))
ext_bright_count+=1
print("Bright sources BAD: {}".format(ext_bright_count))
# write EXT_BAD column
if(col_name in cols):
t.replace_column(col_name,ext_bad)
else:
t.add_column(col_bad)
t.write(infile, format='fits', overwrite=True)
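make_ext_bad() ends with the add-or-replace column idiom of astropy.table. A minimal standalone illustration with toy data (not from the commit):

    # Toy example of replace_column / add_column on an astropy Table.
    import numpy as np
    from astropy.table import Table, Column

    t = Table({'EXT': [0.0, 12.5, 30.1]})
    ext_bad = np.array([False, False, True])
    col = Column(name='EXT_BAD', data=ext_bad)
    if 'EXT_BAD' in t.colnames:
        t.replace_column('EXT_BAD', ext_bad)
    else:
        t.add_column(col)
    print(t)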

File diff suppressed because one or more lines are too long

View File

@ -2,6 +2,7 @@ from astropy.io import fits
import sys import sys
filename='Gaia_unWISE_Coma.fits.catalog' filename='Gaia_unWISE_Coma.fits.catalog'
#filename='Gaia_unWISE_Coma.footprint.fits.catalog'
fout=filename.replace(".fits.catalog", ".reg") fout=filename.replace(".fits.catalog", ".reg")
hdul = fits.open(filename) hdul = fits.open(filename)

View File

@ -0,0 +1,66 @@
from astropy.io import fits
import sys
from astropy.wcs import WCS
from astropy import wcs
from astropy.table import QTable, Table, Column
from astropy.coordinates import SkyCoord # High-level coordinates
from astropy.coordinates import ICRS, Galactic, FK4, FK5 # Low-level frames
from astropy.coordinates import Angle, Latitude, Longitude # Angles
import astropy.units as u
filename='Gaia_unWISE_Coma.fits.catalog'
fout=filename.replace(".fits.catalog", ".footprint.reg")
hdul = fits.open(filename)
#hdul.info()
tbdata = hdul[1].data
hdul.close()
detmask='../../products/mosa_parts_tm0_DetectorMask_en00.fits'
print("reading sensmap {}".format(detmask))
hdul = fits.open(detmask)
mask = hdul[0].data
hdr = hdul[0].header
hdul.close()
wcs = WCS(hdr)
tab = Table.read(filename, format='fits')
tbr=[]
for idx in range(len(tab['RA'])):
ra=tab['RA'][idx]
dec=tab['DEC'][idx]
crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
pix = wcs.wcs_world2pix([(ra, dec),], 1)
px=round(pix[0][0])-1
py=round(pix[0][1])-1
if not (px >=0 and py >= 0 and px <= (10000-1) and py < (10000-1)):
tbr.append(idx)
continue
if mask[py,px] == 0:
tbr.append(idx)
tab.remove_rows(tbr)
tab.write("Gaia_unWISE_Coma.footprint.fits.catalog", format='fits', overwrite='True')
with open("./{}".format(fout), 'w') as writer:
for rec in tbdata:
ra=rec['RA']
dec=rec['DEC']
crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
pix = wcs.wcs_world2pix([(ra, dec),], 1)
px=round(pix[0][0])-1
py=round(pix[0][1])-1
if not (px >=0 and py >= 0 and px <= (10000-1) and py < (10000-1)):
continue
if mask[py,px] == 1:
print("fk5;circle({}, {}, 0.008)".format(rec['RA'],rec['DEC']))
writer.write("fk5;circle({}, {}, {})\n".format(rec['RA'],rec['DEC'],0.0080000))

View File

@@ -57,12 +57,16 @@ from coma.config import *
 root_path=dirname(dirname(dirname(inspect.getfile(coma))))
 print("Coma root path: {}".format(root_path))
+cwd = os.path.dirname(os.path.realpath(__file__))
 infile_dir=root_path+'/data/processed'
 outfile_dir=root_path+'/products'
 create_folder(outfile_dir)
+local_run=False
 run_Pool=False
-keylist=keylist_survey
+keylist=keylist_parts
 do_init = False
 do_ermask = False
@@ -81,10 +85,8 @@ do_cross_match = False
 do_astro_corr = False # search optimal shift
 do_astro_update = True
-do_wcs_match = False # Chandra task -- DEPRECATED
-do_wcs_update = False # Chandra task -- DEPRECATED
-eband_selected=[0]
+eband_selected=[1,2,3,4,5,6,7]
+#eband_selected=[0,]
 vign=False
 vignetting = 'vign' if (vign==True) else 'novign'
@ -116,11 +118,12 @@ def runme(datakey):
outfile_evtool, outfile_expmap = init_events(key=datakey, eband_index=eband[index], outfile_evtool, outfile_expmap = init_events(key=datakey, eband_index=eband[index],
infile_dir=infile_dir, infile_dir=infile_dir,
outfile_dir=outfile_dir, outfile_dir=outfile_dir,
local_run=local_run, cwd=cwd,
do_init=do_init, do_init=do_init,
do_obsmode=True, do_obsmode=True,
do_center=True, do_center=True,
do_evtool=True, do_evtool=True,
do_expmap=True, do_expmap=False,
vign=vign, vign=vign,
ra_cen=ra_cen, de_cen=de_cen, width=width, ra_cen=ra_cen, de_cen=de_cen, width=width,
emin_kev=emin_kev[index], emin_kev=emin_kev[index],
@ -373,32 +376,27 @@ def runme(datakey):
save_catprep_ds9reg(catprep,scale=60*60) save_catprep_ds9reg(catprep,scale=60*60)
if(do_cross_match==True): if(do_cross_match==True):
crossmatch_shu2019(catprep, dlmin=10,crval=[ra_cen, de_cen], refimage=events[ii],datakey=datakey, # full catalog, all RU sky
catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.fits.catalog",errlim=5.0) cross_catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.fits.catalog"
# Coma scans footprint
#cross_catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.footprint.fits.catalog"
crossmatch_shu2019(catprep, dlmin=15,crval=[ra_cen, de_cen], refimage=events[ii],datakey=datakey,devmax=15,
catalog=cross_catalog,errlim=5.0)
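crossmatch_shu2019() above matches the detected sources to the Gaia-unWISE catalog within devmax arcsec; its internals are not shown in full in this diff. As a generic illustration only, nearest-neighbour matching with a separation cut can be done with astropy like this:

    # Generic sketch, not the project's crossmatch_shu2019 implementation.
    import astropy.units as u
    from astropy.coordinates import SkyCoord, FK5

    def simple_xmatch(det_ra, det_dec, ref_ra, ref_dec, devmax=15.0):
        det = SkyCoord(det_ra, det_dec, frame=FK5(), unit="deg")
        ref = SkyCoord(ref_ra, ref_dec, frame=FK5(), unit="deg")
        idx, sep2d, _ = det.match_to_catalog_sky(ref)   # nearest reference source
        matched = sep2d < devmax * u.arcsec              # apply the deviation cut
        return idx, sep2d, matched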
if(do_astro_corr==True and eband[index]==0): if(do_astro_corr==True and eband[index]==0):
""" run astro_corr for 0.3-2.3 keV only """ """ run astro_corr for 0.3-2.3 keV only """
print("START wcs_astro_corr") print("START wcs_astro_corr")
wcs_astro_corr(catprep, Nsim=20000, Rsim=10.0) wcs_astro_corr(catprep, Nsim=5000, Rsim=10.0)
#wcs_match_ciao(catprep, method='rst',radius=12,residlim=0,residtype=0,residfac=1) #wcs_match_ciao(catprep, method='rst',radius=12,residlim=0,residtype=0,residfac=1)
if(do_astro_update==True): if(do_astro_update==True):
""" run astro_corr for 0.3-2.3 keV only """ """ run astro_corr for 0.3-2.3 keV only """
attcorr=wcs_update_shift(events[ii],flog=catprep_en0.replace(".fits", ".shift.log")) attcorr=wcs_update_shift(events[ii],flog=catprep_en0.replace(".fits", ".shift.log"))
do_evtool_esass(evfile=attcorr,outfile=attcorr,rmlock=False, do_center=True, ra_cen=ra_cen, de_cen=de_cen, width=width) do_evtool_esass(evfile=attcorr,outfile=attcorr,rmlock=False, do_center=True, ra_cen=ra_cen, de_cen=de_cen, width=width, local_run=local_run, cwd=cwd)
if(do_wcs_match==True and eband[index] == 0):
""" run wcs_match for 0.3-2.3 keV only """
wcs_match_ciao(catprep, method='trans',radius=12,residlim=5)
#wcs_match_ciao(catprep, method='rst',radius=12,residlim=0,residtype=0,residfac=1)
if(do_wcs_update==True):
""" use 0.3-2.3 keV transform matrix for all other bands """
attcorr=wcs_update_ciao(events[ii],crval=wcslist[datakey],transformfile=catprep_en0.replace(".fits", ".xfm"),clean=False)
do_evtool_esass(evfile=attcorr,outfile=attcorr,rmlock=False, do_center=True, ra_cen=ra_cen, de_cen=de_cen, width=width)
""" """

View File

@@ -72,11 +72,11 @@ infile_dir=root_path+'/data/processed'
 outfile_dir=root_path+'/products'
 create_folder(outfile_dir)
-local_run = True
+local_run = False
 do_init = False
-do_merge = False
+do_merge = True
 do_detmask = False
 do_circles = False # experimental
 do_expmap = False
@@ -96,11 +96,12 @@ do_resid = False # residuals of data and source map
 do_fixcat = False # only for index=0
 do_fixxmm = False # prepare forced photometry, only for index=0
 do_apetool = False
-do_catprep = True
-do_filter_catalog = True
+do_catprep = False
+do_ext_bad = False
+do_filter_catalog = False
 do_cross_match = False
-index=0
+index=4
forced=False forced=False
""" If forced=True, take input catalog from energy range en0 """ """ If forced=True, take input catalog from energy range en0 """
@ -111,8 +112,8 @@ comm='' # for 4XMM-DR12 forced photometry use '-xmm'
vign=True vign=True
attcorr=True attcorr=True
rusky=False rusky=False
keylist = keylist_parts keylist = keylist_parts
outkey = "mosa_parts_tm0{}".format('_attcorr' if (attcorr==True) else '') outkey = "mosa_parts_tm0{}".format('_attcorr' if (attcorr==True) else '')
""" """
@ -122,8 +123,16 @@ mosa_partII_tm0 -- partII only (as is) (keylist_partII)
mosa_parts_tm0 -- all parts (partI and partII) (keylist_parts)
mosa_all_tm0 -- all scans (partI-partII) and survey data (keylist_all)
mosa_tms_tm0 -- all scans (scan1-7) and survey data (keylist_tms)
+mosa_survey -- survey data (keylist_survey)
"""
+extra_events=None
+extra_expmap=None
+#extra_events="/data/erosita/work/coma/products/mosa_survey_tm0_attcorr_EventList_en00.fits"
+#extra_expmap="/data/erosita/work/coma/products/mosa_survey_tm0_attcorr_ExposureMap_en00.vign.fits"
vignetting = 'vign' if (vign==True) else 'novign'
events=[]
expmaps=[]
@ -137,11 +146,12 @@ for tmkey in keylist.keys():
eband_index=eband[index],
infile_dir=infile_dir,
outfile_dir=outfile_dir,
+local_run=local_run, cwd=cwd,
do_init=do_init,
do_obsmode=True,
do_center=True,
do_evtool=True,
-do_expmap=False,
+do_expmap=True,
vign=vign,
ra_cen=ra_cen, de_cen=de_cen, width=width,
emin_kev=emin_kev[index],
@ -154,6 +164,9 @@ for tmkey in keylist.keys():
outfile_evtool="{}_EventList_en{:02d}.fits".format(os.path.join(outfile_dir,outkey), outfile_evtool="{}_EventList_en{:02d}.fits".format(os.path.join(outfile_dir,outkey),
eband[index]) eband[index])
if (extra_events):
outfile_evtool="{} {}".format(outfile_evtool,extra_events)
if(do_merge==True): if(do_merge==True):
#do_evtool_esass(events=events, outfile=outfile_evtool) #do_evtool_esass(events=events, outfile=outfile_evtool)
evlist="{}.evlist.txt".format(os.getpid()) evlist="{}.evlist.txt".format(os.getpid())
@ -163,7 +176,7 @@ if(do_merge==True):
f.close()
print(outfile_evtool)
do_check_events(events=events)
-do_evtool_esass(evlist=evlist, outfile=outfile_evtool, width=width, rusky=rusky)
+do_evtool_esass(evlist=evlist, outfile=outfile_evtool, width=width, rusky=rusky, local_run=local_run, cwd=cwd)
if(os.path.isfile(evlist)==True):
os.remove(evlist)
@ -173,11 +186,10 @@ makes detmask from TM exposures
take limited mask from 'mosa_parts_tm0' for scans+survey data
"""
detmask_outkey=outkey
-"""
-if(do_detmask==False and outkey.startswith('mosa_all_tm0')):
+if(do_detmask==False and (outkey.startswith('mosa_all_tm0') or outkey.startswith('mosa_survey_tm0'))):
print("*** Substitute DetectorMask ***")
detmask_outkey = "mosa_parts_tm0{}".format('_attcorr' if (attcorr==True) else '')
-"""
detmask_slug="{}/{}_DetectorMask_en{:02d}".format(outfile_dir,detmask_outkey,eband[index])
detmask="{}{}".format(detmask_slug,outfile_post)
@ -201,9 +213,21 @@ outfile_bkgmap="{}_BackMap_en{:02d}.{}.fits".format(os.path.join(outfile_dir,out
outfile_expmap="{}_ExposureMap_en{:02d}.{}{}".format(os.path.join(outfile_dir,outkey), outfile_expmap="{}_ExposureMap_en{:02d}.{}{}".format(os.path.join(outfile_dir,outkey),
eband[index],vignetting, eband[index],vignetting,
outfile_post) outfile_post)
_emin_ev = "{}".format(emin_ev[index])
_emax_ev = "{}".format(emax_ev[index])
_ecf = "{}".format(ecf[index])
if (extra_expmap):
outfile_expmap="{} {}".format(outfile_expmap,extra_expmap)
_emin_ev="{} {}".format(emin_ev[index],emin_ev[index])
_emax_ev="{} {}".format(emax_ev[index],emax_ev[index])
_ecf="{} {}".format(ecf[index],ecf[index],)
if(do_expmap==True): if(do_expmap==True):
create_expmap_merged(expmaps,outfile_expmap,scale=7.0) create_expmap_merged(expmaps,outfile_expmap,scale=7.0)
outfile_boxlist1="{}/{}_BoxList1_en{:02d}{}".format(outfile_dir,outkey, eband[index], outfile_post) outfile_boxlist1="{}/{}_BoxList1_en{:02d}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox1==True): if(do_erbox1==True):
cmd=["erbox", cmd=["erbox",
@ -211,9 +235,9 @@ if(do_erbox1==True):
"boxlist=%s" %(outfile_boxlist1), "boxlist=%s" %(outfile_boxlist1),
"expimages=\'{}\'".format(outfile_expmap), "expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask), "detmasks=\'{}\'".format(detmask),
"emin=\'{}\'".format(emin_ev[index]), "emin=\'{}\'".format(_emin_ev),
"emax=\'{}\'".format(emax_ev[index]), "emax=\'{}\'".format(_emax_ev),
"ecf=\'{}\'".format(ecf[index]), "ecf=\'{}\'".format(_ecf),
"nruns=2", "nruns=2",
"likemin=6.0", "likemin=6.0",
"boxsize=4", "boxsize=4",
@ -406,10 +430,12 @@ if(do_ermldet==True):
remove_file(mllist)
remove_file(srcmap)
print(cmd)
-runme(cmd, local_run=local_run)
+runme(cmd, local_run=local_run, cwd=cwd)
print(cmd)
-save_ermldet_ds9reg(mllist,scale=60*60,label='det_like')
-save_ermldet_ds9reg(mllist,scale=60*60,label='id_src')
+#save_ermldet_ds9reg_dl(mllist,scale=60*60,label='det_like')
+#save_ermldet_ds9reg_id(mllist,scale=60*60,label='id_src')
+save_ermldet_ds9reg_dl(mllist,scale=60*60, dl=10, point=True)
+save_ermldet_ds9reg_id(mllist,scale=60*60, dl=10, point=True)
correct_fluxerr_ermldet_forced(mllist)
if(forced==True):
@ -514,6 +540,9 @@ if(do_catprep==True):
remove_file(catprep)
runme(cmd, local_run=local_run)
+if(do_ext_bad==True):
+make_ext_bad(infile=catprep, bkg_map=outfile_backmap3, bkg_cut=1.05, exception=[388,])
if(do_catprep_circles==True):
catprep_circles(mllist,catprep)
@ -524,8 +553,12 @@ if(do_filter_catalog==True):
#filter_catprep(catprep,expcut=5000.0,dlmin=6,dlmax=10,outkey='faint')
if(do_cross_match==True):
-crossmatch_shu2019(catprep,dlmin=10,refimage=outfile_evtool,crval=[ra_cen, de_cen],
-catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.fits.catalog", errlim=5.0)
+# full catalog, all RU sky
+cross_catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.fits.catalog"
+# Coma scans footprint
+#cross_catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.footprint.fits.catalog"
+crossmatch_shu2019(catprep,dlmin=15,refimage=outfile_evtool,crval=[ra_cen, de_cen],
+catalog=cross_catalog, errlim=5.0)
# confused sources according to XMM data
# 194.2812310 27.2174300

scripts/04_mosaics_joint.py Executable file

@ -0,0 +1,455 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
НАЗВАНИЕ:
04_mosaics.py
НАЗНАЧЕНИЕ:
Собирает мозайки в разных энергетических диапазонах.
ВЫЗОВ:
esass
./04_mosaics.py
УПРАВЛЕНИЕ:
Запуск отдельных команд управляется переменными, например: do_init = True
Выбранный энергетический диапазон управляется переменной index
forced=True делает принудительную фотометрию
ПАРАМЕТРЫ:
index : Выбранный энергетический диапазон
ВЫВОД:
Выходные файлы записываются в директорию outfile_dir
ИСТОРИЯ:
Роман Кривонос, ИКИ РАН, krivonos@cosmos.ru
Март 2023
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
#from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import pickle
import coma
from coma.utils import *
from coma.config import *
""" find Coma root dir """
root_path=dirname(dirname(dirname(inspect.getfile(coma))))
cwd = os.path.dirname(os.path.realpath(__file__))
"""
ftools does not like long file path names,
for this reason, we use relative path here
"""
#root_path='..'
print("Coma root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
local_run = False
# check ../products/joint.evtool
do_expmap = False
do_erbox1 = False # local mode
do_erbackmap1 = False #
do_erbox2 = False # map mode, with background map
do_erbackmap2 = False #
do_erbox3 = False # map mode, with background map
do_erbackmap3 = False #
do_ersensmap = False
do_ermldet = True
do_ermldet_ds9reg = True
do_resid = False # residuals of data and source map
do_fixcat = False # only for index=0
do_fixxmm = False # prepare forced photometry, only for index=0
do_apetool = False
do_catprep = True
do_ext_bad = True # mark extended sources as 'BAD'
do_filter_catalog = True
do_cross_match = True
index=0
forced=False
""" If forced=True, take input catalog from energy range en0 """
comm='' # for 4XMM-DR12 forced photometry use '-xmm'
vign=True
attcorr=True
outkey = "mosa_joint_tm0{}".format('_attcorr' if (attcorr==True) else '')
vignetting = 'vign' if (vign==True) else 'novign'
outfile_evtool="{}/mosa_joint_tm0_attcorr_EventList_en{:02d}.fits".format(outfile_dir,eband[index]) # created by products/joint.evtool
outfile_expmap="{}/mosa_joint_tm0_attcorr_ExposureMap_en{:02d}.{}.fits".format(outfile_dir,eband[index],vignetting)
detmask="{}/mosa_parts_tm0_attcorr_DetectorMask_en{:02d}.fits".format(outfile_dir,0)
_emin_ev = "{}".format(emin_ev[index])
_emax_ev = "{}".format(emax_ev[index])
_ecf = "{}".format(ecf[index])
if(do_expmap==True):
add_expmaps(["{}/mosa_parts_tm0_attcorr_ExposureMap_en{:02d}.{}.fits".format(outfile_dir,eband[index],vignetting),
"{}/mosa_survey_tm0_attcorr_ExposureMap_en{:02d}.{}.fits".format(outfile_dir,eband[index],vignetting)],
"{}/mosa_joint_tm0_attcorr_ExposureMap_en{:02d}.{}.fits".format(outfile_dir,eband[index],vignetting))
outfile_boxlist1="{}/{}_BoxList1_en{:02d}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox1==True):
cmd=["erbox",
"images=\'{}\'".format(outfile_evtool),
"boxlist=%s" %(outfile_boxlist1),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"emin=\'{}\'".format(_emin_ev),
"emax=\'{}\'".format(_emax_ev),
"ecf=\'{}\'".format(_ecf),
"nruns=2",
"likemin=6.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=N",
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist1)
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist1)
""" Background map 1 """
outfile_backmap1="{}_BackMap1_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
cheese_mask="{}_CheeseMask1_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_erbackmap1==True):
do_erbackmap_esass(outfile_evtool,outfile_expmap,outfile_boxlist1,detmask,emin_ev[index],emax_ev[index],
outfile_backmap1,cheese_mask)
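# erbox pass 2 (map mode): re-detect using the first background map, then rebuild the background.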
outfile_boxlist2="{}/{}_BoxList2_en{:02d}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox2==True):
cmd=["erbox",
"images=\'{}\'".format(outfile_evtool),
"boxlist=%s" %(outfile_boxlist2),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"emin=\'{}\'".format(_emin_ev),
"emax=\'{}\'".format(_emax_ev),
"ecf=\'{}\'".format(_ecf),
"nruns=2",
"likemin=6.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=Y",
"bkgimages={}".format(outfile_backmap1),
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist2)
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist2)
""" Background map 2 """
outfile_backmap2="{}_BackMap2_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
cheese_mask="{}_CheeseMask2_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_erbackmap2==True):
do_erbackmap_esass(outfile_evtool,outfile_expmap,outfile_boxlist2,detmask,emin_ev[index],emax_ev[index],
outfile_backmap2,cheese_mask)
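# erbox pass 3 (map mode): final box-detection pass; BoxList3 is the input source list for ermldet.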
outfile_boxlist3="{}/{}_BoxList3_en{:02d}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox3==True):
cmd=["erbox",
"images=\'{}\'".format(outfile_evtool),
"boxlist=%s" %(outfile_boxlist3),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"emin=\'{}\'".format(_emin_ev),
"emax=\'{}\'".format(_emax_ev),
"ecf=\'{}\'".format(_ecf),
"nruns=2",
"likemin=6.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=Y",
"bkgimages={}".format(outfile_backmap2),
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist3)
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist3)
""" Background map 3 """
outfile_backmap3="{}_BackMap3_en{:02d}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], vignetting, outfile_post)
cheese_mask="{}_CheeseMask3_en{:02d}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], vignetting, outfile_post)
if(do_erbackmap3==True):
boxlist3 = outfile_boxlist3 if(forced == False) else "{}/{}_BoxList3_en{:02d}{}".format(outfile_dir,outkey, eband[0], outfile_post)
do_erbackmap_esass(outfile_evtool,outfile_expmap,boxlist3,detmask,emin_ev[index],emax_ev[index],
outfile_backmap3,cheese_mask)
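# Forced photometry branch: fluxes are fitted at the fixed positions of the en0 (0.3-2.3 keV)
# reference catalog, with position and extent fitting disabled (fitpos_flag/fitext_flag=no).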
if(forced==True):
mllist="{}_MaxLikSourceList_en{:02d}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
srcmap="{}_SourceMap_en{:02d}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
residmap="{}_ResidMap_en{:02d}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
""" for en1,2,3,4,5,6,7 give mllist from en0 as input """
boxlist3="{}_MaxLikSourceList_en{:02d}{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
#if(index==0):
boxlist3="{}_MaxLikSourceList_en{:02d}.fixed{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
if not (os.path.exists(boxlist3)):
print("{} not found. Run do_fixcat=True, index=0, forced=False".format(boxlist3))
sys.exit()
add_specific_columns(boxlist3)
fitpos_flag="fitpos_flag=no"
fitext_flag="fitext_flag=no"
nmulsou = "nmulsou=1"
nmaxfit="nmaxfit=10"
multrad="multrad=15."
cutrad="cutrad=15."
if(index == 3 or index == 4):
""" for hard band take unvignetted background """
outfile_backmap3="{}_BackMap3_en{:02d}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], "novign", outfile_post)
else:
mllist="{}_MaxLikSourceList_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
srcmap="{}_SourceMap_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
#mllist="{}_MaxLikSourceList_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
#srcmap="{}_SourceMap_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
residmap="{}_ResidMap_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
boxlist3 = outfile_boxlist3
fitpos_flag="fitpos_flag=yes"
fitext_flag="fitext_flag=yes"
nmulsou = "nmulsou=2"
nmaxfit="nmaxfit=4"
multrad="multrad=20."
cutrad="cutrad=20."
""" allow ermpldet to split sources (no more than two) """
cmd=["ermldet",
"mllist={}".format(mllist),
"boxlist=%s" %(boxlist3),
"images=\'{}\'".format(outfile_evtool),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"bkgimages=\'{}\'".format(outfile_backmap3),
"emin=\'{}\'".format(emin_ev[index]),
"emax=\'{}\'".format(emax_ev[index]),
"ecf=\'{}\'".format(ecf[index]),
"hrdef=",
"likemin=0.",
"extlikemin=5.",
"compress_flag=N",
cutrad,
multrad,
"extmin=2.0",
"extmax=35.0",
#"bkgima_flag=Y", looks outdated
"expima_flag=Y",
"detmask_flag=Y",
"shapelet_flag=no", # !!!
"photon_flag=no", # !!!
"extentmodel=beta",
"thres_flag=N",
"thres_col=like",
"thres_val=30.",
nmaxfit,
nmulsou,
fitpos_flag,
fitext_flag,
"srcima_flag=yes",
"srcimages=\'{}\'".format(srcmap)
]
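# Sensitivity maps: create_sensmap (presumably a wrapper around the eSASS ersensmap task)
# builds a limiting-flux map for the given detection likelihood, which sensmap_corr then
# fills/interpolates with the chosen method ('nearest' here).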
if(do_ersensmap==True):
methods = ['nearest',]# 'linear', 'cubic']
detlike=10
sensmap="{}_SensitivityMap_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), detlike, eband[index], outfile_post)
create_sensmap(sensmap=sensmap,
areatab="{}_AreaTable_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), detlike, eband[index], outfile_post),
expmap=outfile_expmap, backmap=outfile_backmap3,detlike=detlike,
detmask=detmask, emin=emin_ev[index], emax=emax_ev[index],ecf=ecf[index], local_run=local_run, cwd=cwd)
for method in methods:
print("Detlike {}, Method {}".format(detlike,method))
corrmap="{}_SensitivityMap_{}_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), method, detlike, eband[index], outfile_post)
sensmap_corr(sensmap=sensmap, output=corrmap, method=method)
"""
detlike=6
sensmap="{}_SensitivityMap_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), detlike, eband[index], outfile_post)
create_sensmap(sensmap=sensmap,
areatab="{}_AreaTable_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), detlike, eband[index], outfile_post),
expmap=outfile_expmap, backmap=outfile_backmap3,detlike=detlike,
detmask=detmask, emin=emin_ev[index], emax=emax_ev[index],ecf=ecf[index], cwd=cwd)
for method in methods:
print("Detlike {}, Method {}".format(detlike,method))
corrmap="{}_SensitivityMap_{}_dl{}_en{:02d}{}".format(os.path.join(outfile_dir,outkey), method, detlike, eband[index], outfile_post)
sensmap_corr(sensmap=sensmap, output=corrmap, method=method)
"""
if(do_ermldet==True):
test_exe('ermldet')
if(vign==False):
print('Run ermldet with vignetted exposure!')
sys.exit()
remove_file(mllist)
remove_file(srcmap)
print(cmd)
runme(cmd, local_run=local_run, cwd=cwd)
print(cmd)
if(forced==True):
result = check_ermldet_forced(mllist)
# for some reason ermldet breaks the source order in some energy bands; apply this correction for forced runs.
if(result == False):
correct_srcid_ermldet_forced(mllist)
if(do_ermldet_ds9reg==True):
save_ermldet_ds9reg_dl(mllist,scale=60*60, dl=6, point=True)
save_ermldet_ds9reg_id(mllist,scale=60*60, dl=6, point=True)
correct_fluxerr_ermldet_forced(mllist)
if(do_resid==True):
do_resid_map(data=outfile_evtool, model=srcmap, outfile=residmap)
if(do_fixcat==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
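# Manual catalog fix for the reference band: drop the detections listed in srcs_remove and
# add 4XMM counterparts from srcs_add (values are RA, Dec and, presumably, the positional error).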
srcs_remove=[3200,1872,1134,2171,1219,448,357,973,4423,5195,3215,1134,119,
622,2916,2915,2315,6870,824,4027,1463,518,3726,393,4049,1876,2396,1569,
4837,4286,1870,3311,6752,3691,4273,1500,5303,6207,3327,133]
srcs_add = {'4XMM J130526.4+285519':[196.3603538, 28.9221712, 0.853],# 3200
'4XMM J130526.8+285452':[196.3617587,28.9147139, 0.938],# 3200
'4XMM J130123.8+284744':[195.3494948, 28.7956932, 0.692], # 1872
'4XMM J125555.1+283110':[193.9797853,28.5196369,1.648], # 448
'4XMM J125708.3+264926':[194.2847924,26.8239298,0.220],
#'4XMM J022026.3-050251':[35.1098619,-5.0476199,0.551],
#'4XMM J021925.4-042647':[34.8559099,-4.4465007,1.366],
#'4XMM J021910.9-045108':[34.7954311,-4.8522901,0.898],
#'4XMM J021945.2-045331':[34.9383593,-4.8919843,1.538],
#'4XMM J021733.8-051311':[34.3910215,-5.2199877,2.247],
}
fix_catalog(mllist=mllist,refimage=outfile_evtool, srcs_remove=srcs_remove, srcs_add=srcs_add)
"""
Note that fix_catalog added ID_SRC to each XMM source.
Next, we save forced XMM sources (with new ID_SRC!) for later catalog compilation
"""
with open(mllist.replace(".fits", ".xmm.pickle"), 'wb') as f:
pickle.dump(srcs_add, f)
if(do_fixxmm==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
fix_xmm_sources(mllist=mllist,refimage=outfile_evtool, xmm_catalog='../data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog')
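# apetool: aperture photometry for the ermldet sources; apelistout points back to the same
# mllist file, so the aperture photometry columns are written into it (apexflag=yes).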
if(do_apetool==True):
psfmap="{}_PsfMap{}".format(os.path.join(outfile_dir,outkey), outfile_post)
#remove_file(psfmap)
#cmd=["apetool",
# "images=\'{}\'".format(outfile_evtool),
# "psfmaps=\'{}\'".format(psfmap),
# "psfmapflag=yes",]
#runme(cmd, local_run=local_run)
#sys.exit()
cmd=["apetool",
"mllist={}".format(mllist),
"apelistout={}".format(mllist), # give the same file
"images=\'{}\'".format(outfile_evtool),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"bkgimages=\'{}\'".format(outfile_backmap3),
"emin=\'{}\'".format(emin_ev[index]),
"emax=\'{}\'".format(emax_ev[index]),
"srcimages=\'{}\'".format(srcmap),
"psfmaps={}".format(psfmap),
"psfmapflag=no",
"stackflag=no",
"apexflag=yes",
"apesenseflag=no",
"eefextract=0.65",
"cutrad=15",
"eindex=1",]
runme(cmd, local_run=local_run, cwd=cwd)
if(forced==True):
catprep="{}_SourceCatalog_en{:02d}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
else:
catprep="{}_SourceCatalog_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_catprep==True):
cmd=["catprep",
"infile={}".format(mllist),
"outfile={}".format(catprep),]
remove_file(catprep)
runme(cmd, local_run=local_run)
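# make_ext_bad: mark sources as 'BAD' based on the local level of the third-pass background map
# (bkg_cut is presumably a relative background threshold); source ID 388 is exempted.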
if(do_ext_bad==True):
make_ext_bad(infile=catprep, bkg_map=outfile_backmap3, bkg_cut=1.05, exception=[388,])
if(do_filter_catalog==True):
#filter_mllist(mllist,expcut=5000.0,dlcut=10.0,dlmin=10,dlmax=10000)
""" works the same """
filter_catprep(catprep,expcut=500.0,dlmin=10,dlmax=10000000,outkey='dl10')
#filter_catprep(catprep,expcut=5000.0,dlmin=6,dlmax=10,outkey='faint')
if(do_cross_match==True):
crossmatch_shu2019(catprep,dlmin=10,refimage=outfile_evtool,crval=[ra_cen, de_cen],
catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_Coma.fits.catalog", errlim=5.0)
# confused sources according to XMM data
# 194.2812310 27.2174300


@ -46,11 +46,11 @@ from os.path import dirname
import inspect
import pickle
-import uds
+import coma
-from uds.utils import *
+from coma.utils import *
-from uds.config import *
+from coma.config import *
-from uds.sherpa import *
+#from coma.sherpa import *
""" find UDS root dir """
@ -60,31 +60,26 @@ ftools does not like long file path names,
for this reason, we use relative path here
"""
root_path='..'
-print("UDS root path: {}".format(root_path))
+print("Coma root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
-srctool_dir="{}/{}".format(outfile_dir,"srctool-products")
+srctool_dir="{}/{}".format(outfile_dir,"srctool-products-joint")
create_folder(srctool_dir)
-outkey="tm0"
+outkey="mosa_joint_tm0_attcorr"
outfile_srctool="{}_SrcTool_".format(outkey)
-do_init = False
-do_merge = False
-do_srctool = True
+do_srctool = False
do_grppha = False
-do_ecf_calc = False # for all bands
-do_ecf_print = False # for all bands
-do_flux_calc = False # for all bands
do_catalog = False
do_extended = False
do_ds9reg = False
-do_euds_final = False
+do_coma_final = False
do_euds_dr12 = False # crossmatch eUDS with DR12
do_euds_stat = False
do_euds_cds = False
@ -108,34 +103,12 @@ vignetting = 'vign' if (vign==True) else 'novign'
events=[]
expmaps=[]
bkgmaps=[]
-for tmkey in keylist_tm.keys():
-print("TM{} in work... init events".format(tmkey))
-for datakey in keylist_tm[tmkey]:
-print("--> {}".format(datakey))
-""" Prepare the event lists of the individual observations """
-outfile_evtool,outfile_expmap=init_events(key=datakey,attcorr=True,
-eband_index=eband[index],
-infile_dir=infile_dir,
-outfile_dir=outfile_dir,
-do_init=do_init,
-do_obsmode=False,
-do_center=False,
-do_evtool=False,
-do_expmap=False,
-vign=vign,
-ra_cen=ra_cen, de_cen=de_cen,
-emin_kev=emin_kev[index],
-emax_kev=emax_kev[index])
-events.append(outfile_evtool)
-expmaps.append(outfile_expmap)
""" Assemble the merged event list """
-outfile_evtool="{}_EventList_en{}.fits".format(os.path.join(outfile_dir,outkey),
-eband[index])
+outfile_evtool="{}_EventList_en{:02d}.fits".format(os.path.join(outfile_dir,outkey), eband[index])
+outfile_expmap="{}_ExposureMap_en{:02d}.fits".format(os.path.join(outfile_dir,outkey), eband[index])
-if(do_merge==True):
-do_evtool_esass(events=events, outfile=outfile_evtool)
suffix_srctool=".fits"
@ -143,7 +116,7 @@ suffix_srctool=".fits"
If suffix contains no filename extension (does not contain a "."), then ".fits"
is also appended to the filename. """
-catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[0], outfile_post)
+catprep="{}_SourceCatalog_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[0], outfile_post)
""" take source catalog from 0.3-2.3 keV band """
if not (os.path.isfile(catprep)==True):
@ -155,7 +128,7 @@ if(do_srctool==True):
test_exe('srctool')
cmd=['srctool',
"todo=\'SPEC RMF ARF\'",
-"insts=\'1 5 6 7\'",
+"insts=\'1 2 3 4 5 6 7\'",
"eventfiles={}".format(outfile_evtool),
"prefix=\'{}\'".format(os.path.join(srctool_dir,outfile_srctool)),
"suffix=\'{}\'".format(suffix_srctool),
@ -174,61 +147,51 @@ if(do_srctool==True):
if(do_grppha==True):
group_spectra("{}/*020_SourceSpec_*.fits".format(srctool_dir))
ecfout="{}_SampleFlux_v1.pickle".format(os.path.join(outfile_dir,outkey))
if(do_ecf_calc==True):
calc_ecf("{}/tm0_SrcTool_020_ARF_?????.fits".format(srctool_dir),
catprep=catprep, emin=emin_kev, emax=emax_kev, eband=eband, outfile=ecfout, simnum=10000)
if(do_ecf_print==True):
print_ecf(infile=ecfout, emin=emin_kev, emax=emax_kev, eband=eband, skipfrac=10.0)
fluxout="{}_SherpaFlux.pickle".format(os.path.join(outfile_dir,outkey))
if(do_flux_calc==True):
calc_flux("{}/tm0_SrcTool_020_ARF_?????.fits".format(srctool_dir),
catprep=catprep, emin=emin_kev, emax=emax_kev, eband=eband, outfile=ecfout, simnum=100)
#index=0 index=0
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post) catprep="{}_SourceCatalog_en{:02d}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
rawcat="{}_SourceCatalog_en{}.pickle".format(os.path.join(outfile_dir,outkey), eband[index]) rawcat="{}_SourceCatalog_en{:02d}.dl10.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
if(do_catalog==True): if(do_catalog==True):
-forced_xmm_sources="{}_MaxLikSourceList_en{}.xmm.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
+"""
+forced_xmm_sources="{}_MaxLikSourceList_en{:02d}.xmm.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
with open(forced_xmm_sources, 'rb') as f:
print("Reading forced XMM sources from {}".format(forced_xmm_sources))
srcs_forced = pickle.load(f)
print()
print(srcs_forced)
print()
+"""
-make_euds_catalog(infile='../products/tm0_SourceCatalog_en0.forced.fits', rawcat=rawcat, dlmin=10.0, dlmax=100000, ext_like=1000,
-emin=emin_kev[index], emax=emax_kev[index], eband=eband[index],
-infile_en00cat=catprep,
-infile_en01cat='../products/tm0_SourceCatalog_en1.forced.fits',
-infile_en02cat='../products/tm0_SourceCatalog_en2.forced.fits',
-infile_en03cat='../products/tm0_SourceCatalog_en3.forced.fits',
-infile_en06cat='../products/tm0_SourceCatalog_en6.forced.fits',
-infile_en00sens='../products/tm0_SensitivityMap_dl10_en0.fits',
-infile_en01sens='../products/tm0_SensitivityMap_dl10_en1.fits',
-infile_en02sens='../products/tm0_SensitivityMap_dl10_en2.fits',
-infile_en03sens='../products/tm0_SensitivityMap_dl10_en3.fits',
-infile_en06sens='../products/tm0_SensitivityMap_dl10_en6.fits',
-srcs_forced=srcs_forced,
-)
+make_coma_catalog(infile='../products/mosa_joint_tm0_attcorr_SourceCatalog_en00.forced.fits',
+rawcat=rawcat, dlmin=10.0, dlmax=10000000, ext_like=1000,
+emin=emin_kev[index], emax=emax_kev[index], eband=eband[index],
+infile_en00cat=catprep,
+infile_en01cat='../products/mosa_joint_tm0_attcorr_SourceCatalog_en01.forced.fits',
+infile_en02cat='../products/mosa_joint_tm0_attcorr_SourceCatalog_en02.forced.fits',
+infile_en03cat='../products/mosa_joint_tm0_attcorr_SourceCatalog_en03.forced.fits',
+infile_en04cat='../products/mosa_joint_tm0_attcorr_SourceCatalog_en04.forced.fits',
+infile_en00sens='../products/mosa_joint_tm0_attcorr_SensitivityMap_nearest_dl10_en00.fits',
+infile_en01sens='../products/mosa_joint_tm0_attcorr_SensitivityMap_nearest_dl10_en01.fits',
+infile_en02sens='../products/mosa_joint_tm0_attcorr_SensitivityMap_nearest_dl10_en02.fits',
+infile_en03sens='../products/mosa_joint_tm0_attcorr_SensitivityMap_nearest_dl10_en03.fits',
+infile_en04sens='../products/mosa_joint_tm0_attcorr_SensitivityMap_nearest_dl10_en04.fits',
+#srcs_forced=srcs_forced,
+)
if(do_extended==True):
-make_extended(infile=rawcat,outreg="{}_ExtendedCat_en{}.reg".format(os.path.join(outfile_dir,outkey), eband[index]))
+make_extended(infile=rawcat,outreg="{}_ExtendedCat_en{:02d}.reg".format(os.path.join(outfile_dir,outkey), eband[index]))
if(do_ds9reg==True):
#make_final_ds9reg(infile=rawcat,outreg="{}_FinalCat_dl10.reg".format(os.path.join(outfile_dir,outkey)))
make_final_ds9reg(infile=rawcat,scale=(60*60)/10,outreg="{}_FinalCat_dl10_talk.reg".format(os.path.join(outfile_dir,outkey)))
make_final_ds9reg(infile=rawcat,scale=(60*60)/10,noname=True,outreg="{}_FinalCat_dl10_noname.reg".format(os.path.join(outfile_dir,outkey)))
-if(do_euds_final==True):
+if(do_coma_final==True):
-""" make final eUDS catalog """
+""" make final Coma catalog """
-final_euds_catalog(infile=rawcat, outfile_fits='../products/eUDS.fits')
+final_coma_catalog(infile=rawcat, outfile_fits='../products/eCFDS.fits')
if(do_euds_dr12==True):
crossmatch_dr12('../products/eUDS.fits', catalog=root_path+"/data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog", devmax=15)
@ -243,7 +206,7 @@ if(do_euds_cds==True):
if(do_cross_check==True):
""" cross check final eUDS catalog """
-cross_check_euds(infile=catprep, euds='../products/eUDS.fits', outkey="../products/en{}_FinalCat_dl10".format(index))
+cross_check_euds(infile=catprep, euds='../products/eUDS.fits', outkey="../products/en{:02d}_FinalCat_dl10".format(index))
if(do_xmm_catalog==True):
""" compile raw forced XMM catalog """
""" complile raw forced XMM catalog """ """ complile raw forced XMM catalog """