forked from erosita/uds

final catalog ready

This commit is contained in:
Roman Krivonos 2023-04-25 19:16:05 +03:00
parent 4e6025f357
commit 4e5f88e166
17 changed files with 6762 additions and 115 deletions

File diff suppressed because one or more lines are too long

10
data/4XMM-DR12/README.md Normal file

@@ -0,0 +1,10 @@
This directory contains the 4XMM-DR12 catalog.
Source: http://xmmssc.irap.omp.eu/Catalogue/4XMM-DR12/4XMM_DR12.html
The following command was used to extract the sources covering the UDS field:
```
ftselect '4XMM_DR12cat_slim_v1.0.fits[1]' 4XMM_DR12cat_slim_v1.0_UDS.fits 'SC_RA > 32.75 && SC_RA < 36.31 && SC_DEC > -6.55 && SC_DEC < -3.0' clobber=yes
```
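The same box cut can also be made in Python with astropy; a minimal sketch (the column names and file names are taken from the ftselect call above, everything else is illustrative):
```
from astropy.table import Table

# Same UDS box cut as the ftselect call above
cat = Table.read('4XMM_DR12cat_slim_v1.0.fits', hdu=1)
mask = ((cat['SC_RA'] > 32.75) & (cat['SC_RA'] < 36.31) &
        (cat['SC_DEC'] > -6.55) & (cat['SC_DEC'] < -3.0))
cat[mask].write('4XMM_DR12cat_slim_v1.0_UDS.fits', overwrite=True)
```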

15
data/4XMM-DR12/print_ds9reg.py Executable file

@@ -0,0 +1,15 @@
from astropy.io import fits
import sys
filename='4XMM_DR12cat_slim_v1.0_UDS.fits.catalog'
fout=filename.replace(".fits.catalog", ".names.reg")
hdul = fits.open(filename)
#hdul.info()
tbdata = hdul[1].data
with open("../../products/{}".format(fout), 'w') as writer:
    for rec in tbdata:
        #writer.write("fk5;point({}, {})\n".format(rec['sc_ra'],rec['sc_dec']))
        # SC_POSERR is given in arcsec; convert to degrees for the region radius
        writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['sc_ra'],rec['sc_dec'],rec['sc_poserr']/3600,rec['IAUNAME']))

196
data/MCXC/mcxc.fits.catalog Normal file

File diff suppressed because one or more lines are too long

15
data/MCXC/print_ds9reg.py Executable file

@@ -0,0 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from astropy.io import fits
hdul = fits.open('mcxc.fits.catalog')
tbdata = hdul[1].data
hdul.close()
with open("../../products/mcxc.reg", 'w') as writer:
    for rec in tbdata:
        writer.write("fk5;point({}, {}) # color=magenta text={{{}}}\n".format(rec['RAJ2000'],
                                                                              rec['DEJ2000'],
                                                                              rec['OName'],))

4
data/SXDS/README.md Normal file

@@ -0,0 +1,4 @@
This directory contains the Subaru/XMM-Newton Deep Survey (SXDS) catalog: https://ui.adsabs.harvard.edu/abs/2008ApJS..179..124U/abstract
The catalog was obtained from this resource: https://cdsarc.cds.unistra.fr/viz-bin/cat/J/ApJS/179/124
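The table can also be fetched programmatically via astroquery; a minimal sketch (not part of the original workflow, the catalog ID is taken from the link above):
```
from astroquery.vizier import Vizier

# Download the full SXDS source table J/ApJS/179/124 from VizieR
Vizier.ROW_LIMIT = -1
tables = Vizier.get_catalogs('J/ApJS/179/124')
print(tables)
```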

477
data/SXDS/SXDS.fits.catalog Normal file

File diff suppressed because one or more lines are too long

16
data/SXDS/print_ds9reg.py Executable file

@@ -0,0 +1,16 @@
from astropy.io import fits
import sys
filename='SXDS.fits.catalog'
fout=filename.replace(".fits.catalog", ".reg")
hdul = fits.open(filename)
#hdul.info()
tbdata = hdul[1].data
with open("../../products/{}".format(fout), 'w') as writer:
    for rec in tbdata:
        text="{}{}".format(rec['__UWS2008_'],rec['Note'])
        #writer.write("fk5;point({}, {})\n".format(rec['sc_ra'],rec['sc_dec']))
        writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['RAJ2000'],rec['DEJ2000'],rec['e_pos']/3600,text))

10
data/X-CLASS/README.md Normal file

@@ -0,0 +1,10 @@
This directory contains the X-CLASS catalog of galaxy clusters from the paper https://ui.adsabs.harvard.edu/abs/2021A%26A...652A..12K/abstract
The catalog was obtained from this resource: https://vizier.cds.unistra.fr/viz-bin/VizieR-3?-source=J/A%2bA/652/A12/table3
The following command was used to extract the sources covering the UDS field:
```
ftselect 'x-class.fits[1]' x-class_UDS.fits 'RAJ2000 > 32.75 && RAJ2000 < 36.31 && DEJ2000 > -6.55 && DEJ2000 < -3.0' clobber=yes
```
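A quick sanity check of the extracted subset (a sketch; it assumes only the RAJ2000/DEJ2000 columns used in the selection above):
```
from astropy.io import fits

# Print the number of clusters and the coordinate range of the UDS cut
with fits.open('x-class_UDS.fits') as hdul:
    tb = hdul[1].data
    print(len(tb), 'clusters in the UDS box')
    print('RA :', tb['RAJ2000'].min(), '-', tb['RAJ2000'].max())
    print('DEC:', tb['DEJ2000'].min(), '-', tb['DEJ2000'].max())
```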

18
data/X-CLASS/print_ds9reg.py Executable file

@@ -0,0 +1,18 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from astropy.io import fits
hdul = fits.open('x-class_UDS.fits.catalog')
tbdata = hdul[1].data
hdul.close()
with open("../../products/x-class_UDS.fits.reg", 'w') as writer:
    for rec in tbdata:
        writer.write("fk5;circle({}, {}, {}) # color=magenta text={{{} {:.2f} {:.2f}}}\n".format(rec['RAJ2000'],
                                                                                                 rec['DEJ2000'],
                                                                                                 rec['extent']/3600,
                                                                                                 rec['XClass'],
                                                                                                 rec['MLdet'],
                                                                                                 rec['MLext'],))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
NAME:
@@ -41,11 +42,15 @@
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
#from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import pickle
import uds
from uds.utils import *
@@ -65,29 +70,37 @@ infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
local_run = False
outkey="tm0"
do_init = True
do_merge = True
do_detmask = True
do_expmap = True
do_erbox1 = True # local mode
do_erbackmap1 = True #
do_erbox2 = True # map mode, with background map
do_erbackmap2 = True #
do_erbox3 = True # map mode, with background map
do_erbackmap3 = True #
do_ersensmap = True
do_init = False
do_merge = False
do_detmask = False
do_expmap = False
do_erbox1 = False # local mode
do_erbackmap1 = False #
do_erbox2 = False # map mode, with background map
do_erbackmap2 = False #
do_erbox3 = False # map mode, with background map
do_erbackmap3 = False #
do_ersensmap = False
do_ermldet = False
do_fixcat = False # only for index=0
do_fixxmm = True # only for index=0
do_apetool = False
do_catprep = False
do_filter_catalog = False
do_cross_match = False
index=1
index=0
forced=True
forced=False
""" If forced=True, take input catalog from energy range en0 """
comm='-xmm' # for 4XMM-DR12 forced photometry use '-xmm'
vign=True
vignetting = 'vign' if (vign==True) else 'novign'
@@ -107,7 +120,7 @@ for tmkey in keylist_tm.keys():
do_init=do_init,
do_obsmode=False,
do_center=False,
do_evtool=False,
do_evtool=True,
do_expmap=True,
vign=vign,
ra_cen=ra_cen, de_cen=de_cen,
@@ -239,15 +252,24 @@ if(do_erbackmap3==True):
outfile_backmap3,cheese_mask)
if(forced==True):
mllist="{}_MaxLikSourceList_en{}.forced{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
srcmap="{}_SourceMap_en{}.forced{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
""" give mllist from en0 as input """
boxlist3="{}_MaxLikSourceList_en{}{}".format(os.path.join(outfile_dir,outkey), eband[0], outfile_post)
mllist="{}_MaxLikSourceList_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
srcmap="{}_SourceMap_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
""" for en1,2,3,6 give mllist from en0 as input """
boxlist3="{}_MaxLikSourceList_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
if(index==0):
boxlist3="{}_MaxLikSourceList_en{}.fixed{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
if not (os.path.exists(boxlist3)):
print("{} not found. Run do_fixcat=True, index=0, forced=False".format(boxlist3))
sys.exit()
add_specific_columns(boxlist3)
fitpos_flag="fitpos_flag=no"
fitext_flag="fitext_flag=no"
nmulsou = "nmulsou=1"
nmaxfit="nmaxfit=1"
""" don't allow ermpldet to split sources """
if(index == 3):
if(index == 3 or index == 6):
""" for hard band take unvignetted background """
outfile_backmap3="{}_BackMap3_en{}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], "novign", outfile_post)
@@ -258,6 +280,7 @@ else:
fitpos_flag="fitpos_flag=yes"
fitext_flag="fitext_flag=yes"
nmulsou = "nmulsou=2"
nmaxfit="nmaxfit=4"
""" allow ermpldet to split sources (no more than two) """
cmd=["ermldet",
@@ -272,12 +295,12 @@ cmd=["ermldet",
"ecf=\'{}\'".format(ecf[index]),
"hrdef=",
"likemin=0.",
"extlikemin=6.",
"extlikemin=5.",
"compress_flag=N",
"cutrad=10.", # was 15
"cutrad=15.",
"multrad=20.",
"extmin=2.0",
"extmax=15.0",
"extmax=35.0",
#"bkgima_flag=Y", looks outdated
"expima_flag=Y",
"detmask_flag=Y",
@@ -287,7 +310,7 @@ cmd=["ermldet",
"thres_flag=N",
"thres_col=like",
"thres_val=30.",
"nmaxfit=4",
nmaxfit,
nmulsou,
fitpos_flag,
fitext_flag,
@@ -319,13 +342,100 @@ if(do_ermldet==True):
sys.exit()
remove_file(mllist)
remove_file(srcmap)
os.system((" ").join(cmd))
print((" ").join(cmd))
runme(cmd, local_run=local_run)
#correct_fluxerr_ermldet_forced(mllist)
if(forced==True):
check_ermldet_forced(mllist)
result = check_ermldet_forced(mllist)
""" for a some reason, for an arbitrary energy band, ermldet break order of sources. Do this forced correction. """
if(result == False):
correct_srcid_ermldet_forced(mllist)
if(do_fixcat==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
srcs_remove=[174,
90,
299,
300,
504,
215,
401,
20,] # keep 671 as unclassified extended source
srcs_add = {'4XMM J021925.4-042647':[34.8559099,-4.4465007, 1.366], # 147
'4XMM J021922.8-042655':[34.8451832,-4.4487901, 1.958], # 147
'4XMM J021929.4-043224':[34.8728586,-4.5400022, 0.660], # 90
'4XMM J021931.2-043222':[34.8801169,-4.5395495, 2.561], # 90
'4XMM J021911.2-050550':[34.7968110,-5.0972990, 0.732], # 504
'4XMM J021919.3-050511':[34.8307176,-5.0864242,4.988], # 504
'4XMM J021911.5-050501':[34.7981099,-5.0837146,6.834], # 504
'4XMM J021658.9-044900':[34.2455964,-4.8168126,4.449], # 300
'4XMM J021659.2-044945':[34.2468704,-4.8291892,1.548], # 300
'4XMM J021812.2-045814':[34.5510753,-4.9705972,0.550], # 215
'4XMM J021812.0-045813':[34.5502698,-4.9703004,0.497], # 215
'4XMM J021912.6-052756':[34.8028459,-5.4656239,0.579], # 401
'4XMM J021705.5-042254':[34.2730294,-4.3816810,0.288], # 20
'4XMM J021705.3-042314':[34.2720952,-4.3873162,0.587], # 20
'4XMM J021827.2-045456':[34.6134256,-4.9157208,0.252],
'4XMM J021831.3-045504':[34.6306930,-4.9178676,0.242],
'4XMM J021925.4-045201':[34.8558373,-4.8671200,0.529],
}
fix_catalog(mllist=mllist,refimage=outfile_evtool, srcs_remove=srcs_remove, srcs_add=srcs_add)
"""
Note that fix_catalog has added an ID_SRC to each XMM source.
Next, we save the forced XMM sources (with their new ID_SRC!) for later catalog compilation.
"""
with open(mllist.replace(".fits", ".xmm.pickle"), 'wb') as f:
pickle.dump(srcs_add, f)
if(do_fixxmm==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
fix_xmm_sources(mllist=mllist,refimage=outfile_evtool, xmm_catalog='../data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog')
if(do_apetool==True):
psfmap="{}_PsfMap{}".format(os.path.join(outfile_dir,outkey), outfile_post)
#remove_file(psfmap)
#cmd=["apetool",
# "images=\'{}\'".format(outfile_evtool),
# "psfmaps=\'{}\'".format(psfmap),
# "psfmapflag=yes",]
#runme(cmd, local_run=local_run)
cmd=["apetool",
"mllist={}".format(mllist),
"apelistout={}".format(mllist), # give the same file
"images=\'{}\'".format(outfile_evtool),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"bkgimages=\'{}\'".format(outfile_backmap3),
"emin=\'{}\'".format(emin_ev[index]),
"emax=\'{}\'".format(emax_ev[index]),
"srcimages=\'{}\'".format(srcmap),
"psfmaps={}".format(psfmap),
"psfmapflag=no",
"stackflag=no",
"apexflag=yes",
"apesenseflag=no",
"eefextract=0.65",
"cutrad=15",
"eindex=1",]
runme(cmd, local_run=local_run)
if(forced==True):
catprep="{}_SourceCatalog_en{}.forced{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
catprep="{}_SourceCatalog_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
else:
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
@@ -334,8 +444,8 @@ if(do_catprep==True):
"infile={}".format(mllist),
"outfile={}".format(catprep),]
remove_file(catprep)
os.system((" ").join(cmd))
print((" ").join(cmd))
runme(cmd, local_run=local_run)
if(do_filter_catalog==True):
#filter_mllist(mllist,expcut=5000.0,dlcut=10.0,dlmin=10,dlmax=10000)

View File

@@ -44,6 +44,8 @@ import numpy as np
import glob
from os.path import dirname
import inspect
import pickle
import uds
from uds.utils import *
@@ -75,11 +77,15 @@ do_init = False
do_merge = False
do_srctool = False
do_grppha = False
do_ecf_calc = False
do_ecf_print = False
do_catalog = True
do_ecf_calc = False # for all bands
do_ecf_print = False # for all bands
do_catalog = False
do_extended = False
do_ds9reg = False
do_xmm_catalog = False
do_xmm_final = True
index=1
index=0
""" работаем именно в этом диапазоне, чтобы спектры покрывали все энергии """
vign=True
@@ -160,9 +166,48 @@ if(do_ecf_print==True):
print_ecf(infile=ecfout, emin=emin_kev, emax=emax_kev, eband=eband, skipfrac=10.0)
index=0
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
rawcat="{}_SourceCatalog_en{}.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
if(do_catalog==True):
make_catalog(infile=catprep, dlmin=10.0, dlmax=100000, ext_like=10, ecf=ecf[index],
forced_xmm_sources="{}_MaxLikSourceList_en{}.xmm.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
with open(forced_xmm_sources, 'rb') as f:
srcs_forced = pickle.load(f)
make_catalog(infile='../products/tm0_SourceCatalog_en0.forced.fits', rawcat=rawcat, dlmin=10.0, dlmax=100000, ext_like=1000,
emin=emin_kev[index], emax=emax_kev[index], eband=eband[index],
infile_en00cat=catprep,
infile_en01cat='../products/tm0_SourceCatalog_en1.forced.fits',
infile_en02cat='../products/tm0_SourceCatalog_en2.forced.fits',
infile_en03cat='../products/tm0_SourceCatalog_en3.forced.fits',
infile_en03sens='../products/tm0_SensitivityMap_dl10_en3.fits')
infile_en06cat='../products/tm0_SourceCatalog_en6.forced.fits',
infile_en00sens='../products/tm0_SensitivityMap_dl10_en0.fits',
infile_en01sens='../products/tm0_SensitivityMap_dl10_en1.fits',
infile_en02sens='../products/tm0_SensitivityMap_dl10_en2.fits',
infile_en03sens='../products/tm0_SensitivityMap_dl10_en3.fits',
infile_en06sens='../products/tm0_SensitivityMap_dl10_en6.fits',
srcs_forced=srcs_forced,
)
if(do_extended==True):
make_extended(infile=rawcat,outreg="{}_ExtendedCat_en{}.reg".format(os.path.join(outfile_dir,outkey), eband[index]))
if(do_ds9reg==True):
make_final_ds9reg(infile=rawcat,outreg="{}_FinalCat_dl10.reg".format(os.path.join(outfile_dir,outkey)))
if(do_xmm_catalog==True):
""" complile raw forced XMM catalog """
make_xmm_catalog(infile_en00cat='../products/tm0_SourceCatalog_en0.forced-xmm.fits',
infile_en01cat='../products/tm0_SourceCatalog_en1.forced-xmm.fits',
infile_en02cat='../products/tm0_SourceCatalog_en2.forced-xmm.fits',
infile_en03cat='../products/tm0_SourceCatalog_en3.forced-xmm.fits',
infile_en06cat='../products/tm0_SourceCatalog_en6.forced-xmm.fits',
forced_xmm_sources='../products/tm0_MaxLikSourceList_en0.fixed-xmm.pickle',
outfile='../products/tm0_4XMM-DR12.pickle')
if(do_xmm_final==True):
""" make final XMM-forced catalog """
final_xmm_catalog(infile='../products/tm0_4XMM-DR12.pickle', outfile_fits='../products/eUDS_4XMM-DR12.fits')

View File

@@ -38,3 +38,7 @@ source <MY PATH>/eSASS4EDR/bin/esass-init.csh
### 05_scrtool.py
Runs srctool for the widest band, 0.2-10 keV, so that the spectra have the fullest possible energy coverage. The source list is taken from the 0.3-2.3 keV band.
Computes the ECF for all bands.
Performs forced photometry in the selected bands (the ```forced=True``` parameter). Note that ermldet from eSASS4EDR does not produce asymmetric flux errors, so we run a newer version of ermldet (v1.56/2.18 esass_200412 Jul 2 12:04:46 2022). For this the ```local_run=True``` parameter is used: it prints the command that has to be run on the other machine and waits for input.
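As a rough illustration of the `local_run` switch described above, here is a minimal hypothetical sketch of a `runme`-style wrapper (the actual implementation is part of the uds package and may differ):
```
import subprocess

def runme(cmd, local_run=False):
    """Sketch only: execute an eSASS command directly, or (local_run=True)
    print it so it can be run by hand with a newer ermldet build elsewhere."""
    line = " ".join(cmd)
    if local_run:
        print("Run this command with the newer ermldet:\n" + line)
        input("Press Enter when it has finished...")
    else:
        subprocess.run(line, shell=True, check=True)
```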

View File

@@ -44,16 +44,16 @@ wcslist={'tm1_obs_1':[34.7279760,-5.0680267],
'tm6_scan_4':[34.5405596,-4.8088748]}
""" Это просто индекс диапазона для выходных файлов. """
eband=[ "0", "1", "2", "3", "4", "5"]
eband=[ "0", "1", "2", "3", "4", "5", "6"]
""" Диапазоны энергий. """
emin_ev=[ 300, 300, 600, 2300, 200, 300]
emax_ev=[2300, 600, 2300, 5000, 10000,8000]
emin_ev=[ 300, 300, 600, 2300, 200, 300, 5000]
emax_ev=[2300, 600, 2300, 5000, 10000,8000, 8000]
emin_kev=[0.3, 0.3, 0.6, 2.3, 0.2, 0.3]
emax_kev=[2.3, 0.6, 2.3, 5.0, 10.0, 8.0]
emin_kev=[0.3, 0.3, 0.6, 2.3, 0.2, 0.3, 5.0]
emax_kev=[2.3, 0.6, 2.3, 5.0, 10.0, 8.0, 8.0]
#ecf = [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
ecf = [9.7817E+11, 3.2982E+12, 1.3903E+12, 2.3322E+12, 5.2022E+11, 5.8453E+11]
ecf = [9.7817E+11, 3.2982E+12, 1.3903E+12, 2.3322E+12, 5.2022E+11, 5.8453E+11, 3.8468E+12]
"""
*** en0 ecf 9.7817E+11 +/- 2.4606E+10 2.52% N=17

File diff suppressed because it is too large Load Diff