Compare commits

..

31 Commits

Author SHA1 Message Date
d13db7d42f final 2024-10-14 14:53:16 +03:00
1f7815e0c5 makecds added 2024-05-15 16:21:15 +03:00
ad7e18637b Minor changes 2024-03-18 14:49:52 +03:00
afe3862cda Many changes 2024-03-18 14:46:38 +03:00
4e5f88e166 final catalog ready 2023-04-25 19:16:05 +03:00
4e6025f357 update 2023-03-29 17:28:29 +03:00
09deb0e4f0 05_srctool added 2023-03-16 19:31:59 +03:00
d67146208e evening job 2023-03-09 21:10:54 +03:00
e7f6f9d257 evening job 2023-03-09 21:10:39 +03:00
b94818554f 8 March work 2023-03-08 19:58:59 +03:00
fd370a8209 clean 2023-03-07 16:05:36 +03:00
faf0b65b25 clean 2023-03-07 16:04:53 +03:00
3e0e0a17ed rm *reg 2023-03-07 16:03:38 +03:00
68f6dffc26 cross match 2023-03-07 16:02:08 +03:00
ae61b520cf cross match 2023-03-07 16:01:23 +03:00
bfb7f7dc21 Gaia-unWISE 2023-03-07 15:33:11 +03:00
f3e7051835 commit 2023-03-06 15:43:28 +03:00
182ede9d0a Minor 2023-02-16 19:36:16 +03:00
d8e7eb9408 Changes 2023-02-16 18:22:07 +03:00
e27d4a5e62 Changes 2023-02-16 18:21:30 +03:00
8ceaf6d75c First steps 2023-02-15 17:19:29 +03:00
6a93354c41 First steps 2023-02-15 17:18:38 +03:00
e64fef3d6d First steps 2023-02-15 17:18:03 +03:00
c7057054d9 First steps 2023-02-15 17:15:13 +03:00
d3565aa9ba First steps 2023-02-15 17:14:46 +03:00
88a8bf5877 Minor 2023-02-15 13:47:53 +03:00
0749a635ef Minor 2023-02-15 13:46:32 +03:00
ca754ffb2a ignore 2023-02-15 13:35:51 +03:00
def5d4aebb Checks 2023-02-15 13:34:52 +03:00
a1d16472ea Checks 2023-02-15 13:33:33 +03:00
bb128b2936 check 2023-02-15 13:27:44 +03:00
52 changed files with 66550 additions and 29 deletions

73
.gitignore vendored
View File

@@ -1,5 +1,76 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
products/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
*~ *~
*# *#
.#* .#*
*.egg_info
# Astronomy
*.eps
*.cds
*.fits *.fits
*.fits.gz
*.fits.fz
*.awk
*.tex
*.evt
*.evt.gz
*.lock
*.reg
*.cross
*.ref
*.src
*.dat
*.xfm
*.log
*.tmp
*.attcorr
*.aspsol
*.txt
*.png
*.grp
*.csv
.*
*.qdp
*.pickle
!.gitignore

View File

@@ -18,13 +18,20 @@ source ./venv/bin/activate.csh
``` ```
mkdir ~/work; cd ~/work mkdir ~/work; cd ~/work
git clone git@danko.iki.rssi.ru:erosita/uds.git git clone git@danko.iki.rssi.ru:erosita/uds.git
cd uds
``` ```
## Шаг 3. Устанавливаем проект в ваше виртуальное окружение ## Шаг 3. Устанавливаем проект в ваше виртуальное окружение
``` pip install uds ```
```
cd uds
pip install --editable uds/
```
Обратите внимание на параметр **--editable**, он позволяет вам редактировать исходный код данного пакета и сразу его выполнять. Если вы не планируете модифицировать локальную копию кода, можете убрать этот параметр.
После работы можете удалить проект: После работы можете удалить проект:
``` pip uninstall uds ``` ``` pip uninstall uds ```
## Работа с данными
Непосредственная работа с данными происходит в директории scripts, где нужно последовательно запускать скрипты обработки. В данной директории находится подробное описание всех действий.
Работа начинается со списка событий, расположенных в директории

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

15
data/4XMM-DR12/README.md Normal file
View File

@@ -0,0 +1,15 @@
В данной директории находится каталог 4XMM-DR12
Источник: http://xmmssc.irap.omp.eu/Catalogue/4XMM-DR12/4XMM_DR12.html
Для вырезки источников для работы с полем UDS была использована эта команда:
```
ftselect '4XMM_DR12cat_slim_v1.0.fits[1]' 4XMM_DR12cat_slim_v1.0_UDS.fits 'SC_RA > 32.75 && SC_RA < 36.31 && SC_DEC > -6.55 && SC_DEC < -3.0' clobber=yes
ftselect '4XMM_DR12cat_v1.0.fits[1]' 4XMM_DR12cat_v1.0_UDS.fits 'SC_RA > 32.75 && SC_RA < 36.31 && SC_DEC > -6.55 && SC_DEC < -3.0' clobber=yes
Demo with DR13:
ftselect '4XMM_slim_DR13cat_v1.0.fits[1]' 4XMM_slim_DR13cat_v1.0_DEMO.fits 'SC_RA > 25.75 && SC_RA < 50.31 && SC_DEC > -16.55 && SC_DEC < 13.0' clobber=yes
```

BIN
data/4XMM-DR12/Rplots.pdf Normal file

Binary file not shown.

20
data/4XMM-DR12/barplot.r Normal file
View File

@@ -0,0 +1,20 @@
# Barplot with error bars, adapted from
# https://r-graph-gallery.com/4-barplot-with-error-bar.html
# Renders the figure into barplot.png in the current directory.
library(ggplot2)

png(file = "barplot.png")

# Dummy data: one bar per source name with a symmetric error per bar.
bar_data <- data.frame(
  name = c("", "yy", "zz", "ff", "DR12"),
  flux = c(1, 2, 3, 4, 5),
  err  = c(1, 0.2, 3, 2, 4)
)

# Build the plot object first, then print it explicitly so the figure
# is rendered even when this file is source()'d rather than run top-level.
p <- ggplot(bar_data) +
  geom_bar(aes(x = name, y = flux),
           stat = "identity", fill = "skyblue", alpha = 0.7) +
  geom_errorbar(aes(x = name, ymin = flux - err, ymax = flux + err),
                width = 0.4, colour = "black", alpha = 0.9, linewidth = 2)
print(p)

dev.off()

View File

@@ -0,0 +1,40 @@
# Plot eROSITA (eUDS) vs 4XMM-DR12 fluxes with cross-shaped error bars
# on log-log axes, and overlay the 1:1 line plus +/- 1 dex guide lines.
# Output: ../../products/eUDS_4XMM-DR12.flux.eps
options(digits = 7) # knitr sometimes decreases number of digits shown
require(stats)

# Tick positions for the logarithmic flux axes (erg/s/cm2).
xgrid <- c(1e-17,1e-16,1e-15,1e-14,1e-13,1e-12);

# Single-string paste(..., sep="") was a no-op; pass the literal directly.
postscript("../../products/eUDS_4XMM-DR12.flux.eps",
           horizontal = FALSE, onefile = FALSE, paper = "special",
           width = 9.0, height = 6.0)
par(oma=c(1,1,1,1))
#layout(matrix(c(1,2), 2, 1, byrow = TRUE), heights = c(1.5,1), TRUE)
par(mar=c(4.3, 4.3, 1, 1))
par(cex.lab=1.2)
par(cex.axis=1.2)

# Four whitespace-separated columns: eROSITA flux/error, XMM flux/error.
# NOTE(review): the extension really is ".cvs" (not ".csv"); presumably the
# upstream script writes it under this name -- confirm before renaming.
a <- read.table("../../products/eUDS_4XMM-DR12.flux.cvs",
                col.names=c("eflux","eerr","xflux","xerr"))

xlim <- c(1e-15,1e-12)
ylim <- c(1e-16,1e-12)
plot(NULL, pch=21, bg="gold", ylim=ylim, xlim=xlim, main="",
     ylab="eUDS, erg/s/cm2", type="p", xaxt="n",
     xlab="4XMM-DR12, erg/s/cm2", log="xy")
axis(side=1, at=xgrid, labels=xgrid)

# Vertical (eROSITA) and horizontal (XMM) error bars, then the points.
segments(a$xflux,a$eflux-a$eerr,a$xflux,a$eflux+a$eerr)
segments(a$xflux-a$xerr,a$eflux,a$xflux+a$xerr,a$eflux)
#segments(a$xlo,d,a$xhi,d)
points(a$xflux,a$eflux,pch=21, bg="gold")

# 1:1 correspondence line and factor-of-10 guide lines.
# Was "lt=2", which relied on partial argument matching; spelled out as lty.
lines(c(1e-17,1e-12),c(1e-17,1e-12))
lines(c(1e-17,1e-12),10*c(1e-17,1e-12),lty=2)
lines(c(1e-17,1e-12),0.1*c(1e-17,1e-12),lty=2)

grid()
abline(h=0, col = "black",lty=2)
abline(v=xgrid, col="lightgray", lty="dotted")
dev.off()

19
data/4XMM-DR12/print_ds9reg.py Executable file
View File

@@ -0,0 +1,19 @@
#!/usr/bin/env python
"""Print a ds9 region (one fixed-radius fk5 circle per source) to stdout
for every row of the selected 4XMM catalog table."""
from astropy.io import fits
import sys

#filename='4XMM_DR12cat_slim_v1.0_UDS.fits.catalog'
filename='4XMM_slim_DR13cat_v1.0_DEMO.fits'

# Derive the region-file name from whichever suffix the input carries.
# The original replace(".fits.catalog", ...) was a no-op for the active
# plain-".fits" filename, leaving fout equal to the input name.
if filename.endswith(".fits.catalog"):
    fout = filename.replace(".fits.catalog", ".names.reg")
else:
    fout = filename.replace(".fits", ".names.reg")

# Read the binary table from the first extension, then release the handle.
hdul = fits.open(filename)
#hdul.info()
tbdata = hdul[1].data
hdul.close()

# One 0.008-deg circle per source in fk5 coordinates; the labelled
# write-to-file variant is kept below, disabled.
#with open("./{}".format(fout), 'w') as writer:
for rec in tbdata:
    print("fk5;circle({}, {}, 0.008)".format(rec['sc_ra'],rec['sc_dec']))
    #writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['sc_ra'],rec['sc_dec'],rec['sc_poserr']/3600,rec['IAUNAME']))

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
В данной директории находится каталог Gaia-unWISE из статьи https://ui.adsabs.harvard.edu/abs/2019MNRAS.489.4741S/abstract
Каталог был получен с этого ресурса: https://zenodo.org/record/6837642#.ZAcufYBByRR
Для вырезки источников для работы с полем UDS была использована эта команда:
```
ftselect 'Gaia_unWISE_AGNs.fits[1]' Gaia_unWISE_UDS.fits 'RA > 32.75 && RA < 36.31 && DEC > -6.55 && DEC < -3.0' clobber=yes
```

196
data/MCXC/mcxc.fits.catalog Normal file

File diff suppressed because one or more lines are too long

15
data/MCXC/print_ds9reg.py Executable file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Dump the MCXC cluster catalog as a ds9 region file of labelled points."""
from astropy.io import fits

# Load the binary table from the first FITS extension, closing the file
# as soon as the data are materialized.
with fits.open('mcxc.fits.catalog') as hdul:
    rows = hdul[1].data

# One magenta point per cluster, labelled with its common name.
template = "fk5;point({}, {}) # color=magenta text={{{}}}\n"
with open("../../products/mcxc.reg", 'w') as writer:
    for row in rows:
        writer.write(template.format(row['RAJ2000'], row['DEJ2000'], row['OName']))

File diff suppressed because one or more lines are too long

13
data/NuSTAR/print_ds9reg.py Executable file
View File

@@ -0,0 +1,13 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Convert the NuSTAR source catalog into a ds9 region file of circles."""
from astropy.io import fits

# Read the source table from the first FITS extension.
catalog = fits.open('nustar.fits.catalog')
rows = catalog[1].data
catalog.close()

# Fixed-radius (0.0025 deg) magenta circle per source, labelled by name.
with open("../../products/nustar_UDS.reg", 'w') as writer:
    for row in rows:
        line = "fk5;circle({}, {}, 0.0025) # color=magenta text={{{}}}\n".format(
            row['RAJ2000'], row['DEJ2000'], row['NuSTAR'])
        writer.write(line)

4
data/SXDS/README.md Normal file
View File

@@ -0,0 +1,4 @@
В данной директории находится каталог Subaru/XMM-Newton deep survey (SXDS) https://ui.adsabs.harvard.edu/abs/2008ApJS..179..124U/abstract
Каталог был получен с этого ресурса: https://cdsarc.cds.unistra.fr/viz-bin/cat/J/ApJS/179/124

477
data/SXDS/SXDS.fits.catalog Normal file

File diff suppressed because one or more lines are too long

16
data/SXDS/print_ds9reg.py Executable file
View File

@@ -0,0 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Convert the SXDS catalog into a ds9 region file.

Writes ../../products/SXDS.reg with one fk5 circle per source: the
radius is the positional error converted from arcsec to degrees, and
the label combines the source identifier with its note flag.
"""
from astropy.io import fits
import sys

filename='SXDS.fits.catalog'
fout=filename.replace(".fits.catalog", ".reg")

# Read the source table from the first extension and close the handle
# (the original script leaked it; sibling scripts all close theirs).
hdul = fits.open(filename)
#hdul.info()
tbdata = hdul[1].data
hdul.close()

with open("../../products/{}".format(fout), 'w') as writer:
    for rec in tbdata:
        # Label: presumably the [UWS2008] sequence number plus the Note
        # column -- confirm against the VizieR table description.
        text="{}{}".format(rec['__UWS2008_'],rec['Note'])
        #writer.write("fk5;point({}, {})\n".format(rec['sc_ra'],rec['sc_dec']))
        writer.write("fk5;circle({}, {}, {}) # text={{{}}}\n".format(rec['RAJ2000'],rec['DEJ2000'],rec['e_pos']/3600,text))

10
data/X-CLASS/README.md Normal file
View File

@@ -0,0 +1,10 @@
В данной директории находится каталог скоплений галактик X-CLASS из статьи https://ui.adsabs.harvard.edu/abs/2021A%26A...652A..12K/abstract
Каталог был получен с этого ресурса: https://vizier.cds.unistra.fr/viz-bin/VizieR-3?-source=J/A%2bA/652/A12/table3
Для вырезки источников для работы с полем UDS была использована эта команда:
```
ftselect 'x-class.fits[1]' x-class_UDS.fits 'RAJ2000 > 32.75 && RAJ2000 < 36.31 && DEJ2000 > -6.55 && DEJ2000 < -3.0' clobber=yes
```

18
data/X-CLASS/print_ds9reg.py Executable file
View File

@@ -0,0 +1,18 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Dump the X-CLASS UDS cluster catalog as a ds9 region file."""
from astropy.io import fits

# Load the cluster table from the first FITS extension, closing the
# file once the data are materialized.
with fits.open('x-class_UDS.fits.catalog') as hdul:
    rows = hdul[1].data

region_line = "fk5;circle({}, {}, {}) # color=magenta text={{{} {:.2f} {:.2f}}}\n"

# One magenta circle per cluster: radius is the extent column converted
# from arcsec to degrees; the label shows the X-CLASS id followed by the
# detection and extent likelihoods.
with open("../../products/x-class_UDS.fits.reg", 'w') as writer:
    for row in rows:
        writer.write(region_line.format(row['RAJ2000'],
                                        row['DEJ2000'],
                                        row['extent']/3600,
                                        row['XClass'],
                                        row['MLdet'],
                                        row['MLext']))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

3
data/evtlists/tm1.txt Normal file
View File

@@ -0,0 +1,3 @@
/srg/work/krivonos/erosita/work/events/cef_43140_1_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43146_1_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43147_1_P003_00.fits.gz

9
data/evtlists/tm5.txt Normal file
View File

@@ -0,0 +1,9 @@
/srg/work/krivonos/erosita/work/events/cef_43086_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43087_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43092_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43093_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43097_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43098_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43099_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43103_5_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43104_5_P003_00.fits.gz

19
data/evtlists/tm6.txt Normal file
View File

@@ -0,0 +1,19 @@
/srg/work/krivonos/erosita/work/events/cef_43050_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43051_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43062_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43063_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43064_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43065_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43066_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43067_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43068_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43069_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43070_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43071_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43072_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43073_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43074_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43098_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43099_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43103_6_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43104_6_P003_00.fits.gz

6
data/evtlists/tm7.txt Normal file
View File

@@ -0,0 +1,6 @@
#/srg/work/krivonos/erosita/work/events/cef_43122_7_P003_00.fits.gz
#/srg/work/krivonos/erosita/work/events/cef_43123_7_P003_00.fits.gz
#/srg/work/krivonos/erosita/work/events/cef_43127_7_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43129_7_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43133_7_P003_00.fits.gz
/srg/work/krivonos/erosita/work/events/cef_43134_7_P003_00.fits.gz

11
data/xspec/ML0001.xcm Normal file
View File

@@ -0,0 +1,11 @@
method leven 10 0.01
abund wilm
xsect vern
cosmo 70 0 0.73
xset delta 0.01
systematic 0
model TBabs*powerlaw
0.0123819 1 0 0 100000 1e+06
2 -1 -3 -2 9 10
0.000106473 0.01 0 0 1e+20 1e+24
bayes off

11
data/xspec/ML0003.xcm Normal file
View File

@@ -0,0 +1,11 @@
method leven 10 0.01
abund wilm
xsect vern
cosmo 70 0 0.73
xset delta 0.01
systematic 0
model TBabs*powerlaw
0.00327282 1 0 0 100000 1e+06
2 -1 -3 -2 9 10
0.000121635 0.01 0 0 1e+20 1e+24
bayes off

11
data/xspec/ML0004.xcm Normal file
View File

@@ -0,0 +1,11 @@
method leven 10 0.01
abund wilm
xsect vern
cosmo 70 0 0.73
xset delta 0.01
systematic 0
model TBabs*powerlaw
0.00240283 1 0 0 100000 1e+06
2 -1 -3 -2 9 10
0.000297459 0.01 0 0 1e+20 1e+24
bayes off

11
data/xspec/ML0005.xcm Normal file
View File

@@ -0,0 +1,11 @@
method leven 10 0.01
abund wilm
xsect vern
cosmo 70 0 0.73
xset delta 0.01
systematic 0
model TBabs*powerlaw
0.0320991 1 0 0 100000 1e+06
2 -1 -3 -2 9 10
0.000123814 0.01 0 0 1e+20 1e+24
bayes off

11
data/xspec/ecf.xcm Normal file
View File

@@ -0,0 +1,11 @@
method leven 10 0.01
abund wilm
xsect vern
cosmo 70 0 0.73
xset delta 0.01
systematic 0
model TBabs*powerlaw
0.02 -1 0 0 100000 1e+06
2 -1 -3 -2 9 10
4.83094e-05 0.01 0 0 1e+20 1e+24
bayes off

8
data/xspec/load.xcm Normal file
View File

@@ -0,0 +1,8 @@
data tm0_SrcTool_020_SourceSpec_00005.fits.grp
ign bad
ign **-0.2
ign 10.-**
cpd /xs
setpl en
pl lda

61
scripts/00_check.py Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python
"""Print summary information about the eUDS observations.

For every telescope module (TM) and observation key configured in
uds.config, resolve the names of the processed event files (dry run --
nothing is recomputed), print each observation's TSTART/TSTOP both as
raw values and as UTC dates, and report the accumulated total exposure.
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import uds
# Star imports supply init_events, read_tstart_tstop, mission2date_utc,
# create_folder, keylist_tm, eband, emin_kev, emax_kev, ra_cen, de_cen.
from uds.utils import *
from uds.config import *

# Key used in the names of the combined all-TM mosaic products.
outkey="mosa_tm0"

""" find UDS root dir """
# The installed uds package sits three directory levels below the
# repository root.
root_path=dirname(dirname(dirname(inspect.getfile(uds))))
print("UDS root path: {}".format(root_path))

infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)

# Energy-band index into eband/emin_kev/emax_kev (defined in uds.config).
index=5

events=[]    # resolved per-observation event-list file names
expmaps=[]   # resolved per-observation exposure-map file names
totexp=0.0   # accumulated exposure over all observations
for tmkey in keylist_tm.keys():
    print("TM{} in work... init events".format(tmkey))
    for datakey in keylist_tm[tmkey]:
        #print("--> {}".format(datakey))
        """ Запускаем полностью в холостом режиме, нам нужно получить только названия файлов """
        # (Translation: run fully in dry mode -- we only need the file
        # names.)  All do_* switches are False, so init_events just
        # returns the output file names without processing anything.
        outfile_evtool,outfile_expmap=init_events(key=datakey, eband_index=eband[index],
                                                  infile_dir=infile_dir,
                                                  outfile_dir=outfile_dir,
                                                  do_init=False,
                                                  do_obsmode=False,
                                                  do_center=False,
                                                  do_evtool=False,
                                                  do_expmap=False,
                                                  ra_cen=ra_cen, de_cen=de_cen,
                                                  emin_kev=emin_kev[index],
                                                  emax_kev=emax_kev[index])
        events.append(outfile_evtool)
        expmaps.append(outfile_expmap)
        # TSTART/TSTOP come from the event file; presumably mission-time
        # seconds (mission2date_utc converts them, and /1000 yields ks).
        tstart, tstop = read_tstart_tstop(infile=outfile_evtool)
        totexp=totexp+(tstop-tstart)
        print("{}, {} -- {}, {} -- {}, {:.2f} ks".format(outfile_evtool,tstart,tstop,
                                                         mission2date_utc(tstart),
                                                         mission2date_utc(tstop),
                                                         (tstop-tstart)/1000))
print("\n***\n*** Total exposure: {:.1f} ks\n***".format(totexp))

59
scripts/01_init_events.py Normal file → Executable file
View File

@@ -1,19 +1,58 @@
#!/usr/bin/env python
"""Создает начальные списки событий и помещает их в uds/data/processed
Оригинальные файлы со списками событий задаются в файлах uds/data/evtlists/*.txt
"""
import os
import inspect
from os.path import dirname
import uds
from uds.config import * from uds.config import *
from uds.utils import * from uds.utils import *
""" find UDS root dir """
root_path=dirname(dirname(dirname(inspect.getfile(uds))))
print("UDS root path: {}".format(root_path))
el=root_path+'/data/evtlists/'
pr=root_path+'/data/processed/'
region="box({},{},4d,4d,0)".format(ra_cen,de_cen)
""" Selection region """
""" """
evtool eventfiles=@./tm6_img_1.txt gti='620174080. 620178002.620032' outfile=./tm6_park_1.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes # TM1
evtool eventfiles=@./tm6_img_scan1.txt gti='620178002.620032 620192246.62720' outfile=./tm6_scan_1.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes do_evtool_esass(evlist=el+'tm1.txt', outfile=pr+'tm1_obs_1.fits', gti='621296896. 621304128.', rmlock=True)
evtool eventfiles=@./tm6_img_scan2.txt gti='620192448. 620194624.' outfile=./tm6_park_2.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes # TM5
evtool eventfiles=@./tm6_img_scan2.txt gti='620194666.606144 620208904.673408' outfile=./tm6_scan_2.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes do_evtool_esass(evlist=el+'tm5.txt', outfile=pr+'tm5_obs_1.fits', gti='620606016. 620614848.', rmlock=True)
do_evtool_esass(evlist=el+'tm5.txt', outfile=pr+'tm5_obs_2.fits', gti='620676992. 620689792.', rmlock=True)
evtool eventfiles=@./tm6_img_scan3.txt gti='620209162.670976 620211316.650304' outfile=./tm6_park_3.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes # TM6
evtool eventfiles=@./tm6_img_scan3.txt gti='620211328. 620225600.' outfile=./tm6_scan_3.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_park_1.fits', gti='620174080. 620178002.620032', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_scan_1.fits', gti='620178002.620032 620192246.62720', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_park_2.fits', gti='620192448. 620194624.', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_scan_2.fits', gti='620194666.606144 620208904.673408', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_park_3.fits', gti='620209162.670976 620211316.650304', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_scan_3.fits', gti='620211328. 620225600.', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_park_4.fits', gti='620225853.609024 620227974.68832', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_scan_4.fits', gti='620227904. 620242176.', rmlock=True)
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_obs_1.fits', gti='620242432. 620258368.', rmlock=True)
evtool eventfiles=@./tm6_img_scan3.txt gti='620225853.609024 620227974.68832' outfile=./tm6_park_4.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_obs_2.fits', gti='620607424. 620614656.', rmlock=True)
evtool eventfiles=@./tm6_img_scan3.txt gti='620227904. 620242176.' outfile=./tm6_scan_4.fits flag=0x2000 emin=0.2 emax=10.0 region='box(34.5550100,-4.7177600,153.0',153.0',0)' image=yes do_badpix_tm6(filename=pr+'tm6_obs_2.fits')
do_evtool_esass(evlist=el+'tm6.txt', outfile=pr+'tm6_obs_3.fits', gti='620676992. 620690368.', rmlock=True)
do_badpix_tm6(filename=pr+'tm6_obs_3.fits')
""" """
# TM7
do_evtool_esass(evlist=el+'tm7.txt', outfile=pr+'tm7_obs_1.fits', gti='621043136. 621052416.', emin=0.2, emax=10.0, region=region, rmlock=True)
#do_evtool_esass(evlist=el+'tm7.txt', outfile=pr+'tm7_obs_2.fits', gti='621110272. 621117952.', region=region, rmlock=True)
print("WORK {}".format(work_dir))
printme()

95
scripts/02_merge_events.py Executable file
View File

@@ -0,0 +1,95 @@
#!/usr/bin/env python
"""Create the merged (all-TM) event list and place it in uds/products.

This combined event list is needed mainly for extracting spectra with
srctool.  The script resolves (and optionally re-creates) the
per-observation event lists and their exposure/background maps, merges
them into single mosaic files, and can additionally produce a rate map
and an adaptively smoothed image (the latter requires CIAO).
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import uds
# Star imports supply init_events, do_evtool_esass, do_fimgmerge_ftools,
# make_rate_map, do_adapt_ciao, create_folder, keylist_tm, eband,
# emin_kev, emax_kev, ra_cen, de_cen, etc.
from uds.utils import *
from uds.config import *

# Key used in all merged-mosaic output file names.
outkey="mosa_tm0"

""" find UDS root dir """
#root_path=dirname(dirname(dirname(inspect.getfile(uds))))
"""
ftools does not like long file path names, for this reason, we use relative path here
"""
root_path='..'
print("UDS root path: {}".format(root_path))

infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)

index=3 # select energy band

# Step switches: toggle individual stages of the pipeline.
do_init = True
do_merge = True
do_rate = False
do_adapt = True # requires CIAO

# Vignetting flag: selects which exposure maps are used and tags the
# output file names with 'vign' or 'novign'.
vign=False
vignetting = 'vign' if (vign==True) else 'novign'

events=[]    # per-observation event-list file names
expmaps=[]   # per-observation exposure-map file names
bkgmaps=[]   # per-observation background-map file names
for tmkey in keylist_tm.keys():
    print("TM{} in work... init events".format(tmkey))
    for datakey in keylist_tm[tmkey]:
        #if not ("scan" in datakey):
        # continue
        print("--> {}".format(datakey))
        """ Подготавливаем списки событий индивидуальных наблюдений """
        # (Translation: prepare the event lists of individual
        # observations.)  do_evtool=True re-extracts the per-observation
        # event list; exposure maps are only name-resolved here.
        outfile_evtool,outfile_expmap=init_events(key=datakey, eband_index=eband[index],
                                                  infile_dir=infile_dir,
                                                  outfile_dir=outfile_dir,
                                                  do_init=do_init,
                                                  do_obsmode=False,
                                                  do_center=False,
                                                  do_evtool=True,
                                                  do_expmap=False,
                                                  vign=vign,
                                                  ra_cen=ra_cen, de_cen=de_cen,
                                                  emin_kev=emin_kev[index],
                                                  emax_kev=emax_kev[index])
        events.append(outfile_evtool)
        expmaps.append(outfile_expmap)
        # NOTE(review): background maps are always taken from band
        # eband[0], not the selected eband[index] -- confirm intentional.
        bkgmaps.append("{}_BackMap3_en{}.fits".format(os.path.join(outfile_dir,datakey), eband[0]))

""" Собираем общий список событий """
# (Translation: assemble the combined event list.)  Merge the
# per-observation events and stack the exposure/background maps.
outfile_evtool="{}_EventList_en{}.fits".format(os.path.join(outfile_dir,outkey), eband[index])
outfile_expmap="{}_ExposureMap_en{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], vignetting)
outfile_bkgmap="{}_BackMap_en{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], vignetting)
if(do_merge==True):
    do_evtool_esass(events=events, outfile=outfile_evtool)
    do_fimgmerge_ftools(maps=expmaps, outfile=outfile_expmap)
    do_fimgmerge_ftools(maps=bkgmaps, outfile=outfile_bkgmap)

# Optional rate map: merged counts divided by merged exposure.
outfile_rate="{}_RateMap_en{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], vignetting)
if(do_rate==True):
    make_rate_map(cntmap=outfile_evtool, expmap=outfile_expmap, outfile=outfile_rate)

# Optional adaptively smoothed image (CIAO), gaussian kernel.
function='gaussian'
outfile_adapt="{}_ImageAdapt_en{}.{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], function, vignetting)
if(do_adapt==True):
    do_adapt_ciao(infile=outfile_evtool, outfile=outfile_adapt, expmap=outfile_expmap, function=function, expcut=100)

424
scripts/03_init_obs.py Executable file
View File

@@ -0,0 +1,424 @@
#!/usr/bin/env python
"""
НАЗВАНИЕ:
01_init_obs.py
НАЗНАЧЕНИЕ:
Подготавливает списки событий в разных энергетических диапазонах.
Производит списки источников в каждом наблюдении и делает астрокоррекцию с помощью wcs_match/wcs_update
ВЫЗОВ:
conda activate ciao-4.15
esass
./01_init_obs.py
УПРАВЛЕНИЕ:
Запуск отдельных команд управляется переменными, например: do_init = True
Выбранные энергетические диапазоны управляется массивом eband_selected
ПАРАМЕТРЫ:
eband_selected : Выбранные энергетические диапазоны
ВЫВОД:
Выходные файлы записываются в директорию outfile_dir
ИСТОРИЯ:
Роман Кривонос, ИКИ РАН, krivonos@cosmos.ru
Март 2023
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
import numpy as np
import glob
from multiprocessing import Pool
from os.path import dirname
import inspect
import uds
from uds.utils import *
from uds.config import *
""" find UDS root dir """
root_path=dirname(dirname(dirname(inspect.getfile(uds))))
print("UDS root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
run_Pool=False
do_init = False
do_ermask = False
do_erbox1 = False # local mode
do_erbackmap1 = False #
do_erbox2 = False # map mode, with background map
do_erbackmap2 = False #
do_erbox3 = False # map mode, with background map
do_erbackmap3 = False #
do_ermldet = False
do_catprep = False
do_cross_match = False
do_astro_corr = False # search optimal shift
do_astro_update = True
do_wcs_match = False # Chandra task -- DEPRECATED
do_wcs_update = False # Chandra task -- DEPRECATED
eband_selected=[5]
vign=True
vignetting = 'vign' if (vign==True) else 'novign'
def runme(datakey):
""" runs datakey over energy bands """
events=[]
expmaps=[]
outfile_boxlist1=[]
outfile_boxlist2=[]
outfile_boxlist3=[]
outfile_backmap1=[]
outfile_backmap2=[]
outfile_backmap3=[]
cheesemask=[]
bkgimage=[]
srcmaps=[]
print(datakey)
print('module name:', __name__)
print('parent process:', os.getppid())
print('process id:', os.getpid())
for ii in range(len(eband_selected)):
index=eband_selected[ii]
print("\t>>> Energy band en{} -- {}-{} keV".format(eband[index],emin_kev[index],emax_kev[index]))
outfile_evtool, outfile_expmap = init_events(key=datakey, eband_index=eband[index],
infile_dir=infile_dir,
outfile_dir=outfile_dir,
do_init=do_init,
do_obsmode=True,
do_center=True,
do_evtool=True,
do_expmap=True,
vign=vign,
ra_cen=ra_cen, de_cen=de_cen,
emin_kev=emin_kev[index],
emax_kev=emax_kev[index])
expmaps.append(outfile_expmap)
events.append(outfile_evtool)
# After astrometry-corrected files (*.attcorr.fits) are obtained, one can take them as original, in order to check the full chain:
#events.append(outfile_evtool.replace(".fits", ".attcorr.fits"))
""" Detmask """
detmask="{}_DetectionMask{}".format(os.path.join(outfile_dir,datakey), outfile_post)
if(do_ermask==True):
cmd=["ermask",
"expimage=%s" %(expmaps[0]), # use the first exposure maps calculated for that skyfield, independent of the energy band
"detmask=%s" %(detmask),
"threshold1=0.01",
"threshold2=10.0",
"regionfile_flag=no"
]
remove_file(detmask)
print((" ").join(cmd))
os.system((" ").join(cmd))
for ii in range(len(eband_selected)):
index=eband_selected[ii]
print("\t>>> Energy band en{} -- {}-{} keV".format(eband[index],emin_kev[index],emax_kev[index]))
""" erbox in local mode """
outfile_boxlist1.append("{}_BoxList1_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
if(do_erbox1==True):
""" erbox in local mode """
cmd=["erbox",
"images=%s" %(events[ii]),
"boxlist=%s" %(outfile_boxlist1[ii]),
"expimages=%s" %(expmaps[ii]),
"detmasks=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"ecf=1.0",
"nruns=2",
"likemin=6.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=N",
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist1[ii])
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist1[ii])
outfile_backmap1.append("{}_BackMap1_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
cheese_mask="{}_CheeseMask1_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
if(do_erbackmap1==True):
""" back map 1 """
cmd=["erbackmap",
"image=%s" %(events[ii]),
"expimage=%s" %(expmaps[ii]),
"boxlist=%s" %(outfile_boxlist1[ii]),
"detmask=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"bkgimage=%s" %(outfile_backmap1[ii]),
"cheesemask=%s" %(cheese_mask),
"idband=1",
"scut=0.001",
"mlmin=6",
"maxcut=0.5",
"fitmethod=smooth smoothval=15",
"snr=40.",
]
remove_file(cheese_mask)
remove_file(outfile_backmap1[ii])
os.system((" ").join(cmd))
print((" ").join(cmd))
outfile_boxlist2.append("{}_BoxList2_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
if(do_erbox2==True):
""" erbox in background mode """
cmd=["erbox",
"images=%s" %(events[ii]),
"boxlist=%s" %(outfile_boxlist2[ii]),
"expimages=%s" %(expmaps[ii]),
"detmasks=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"ecf=1.0",
"nruns=2",
"likemin=4.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=Y",
"bkgimages={}".format(outfile_backmap1[ii]),
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist2[ii])
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist2[ii])
outfile_backmap2.append("{}_BackMap2_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
cheese_mask="{}_CheeseMask2_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
if(do_erbackmap2==True):
""" back map 2 """
cmd=["erbackmap",
"image=%s" %(events[ii]),
"expimage=%s" %(expmaps[ii]),
"boxlist=%s" %(outfile_boxlist2[ii]),
"detmask=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"bkgimage=%s" %(outfile_backmap2[ii]),
"cheesemask=%s" %(cheese_mask),
"idband=1",
"scut=0.001",
"mlmin=6",
"maxcut=0.5",
"fitmethod=smooth smoothval=15",
"snr=40.",
]
remove_file(cheese_mask)
remove_file(outfile_backmap2[ii])
os.system((" ").join(cmd))
print((" ").join(cmd))
outfile_boxlist3.append("{}_BoxList3_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
if(do_erbox3==True):
""" erbox in map mode FINAL """
cmd=["erbox",
"images=%s" %(events[ii]),
"boxlist=%s" %(outfile_boxlist3[ii]),
"expimages=%s" %(expmaps[ii]),
"detmasks=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"ecf=1.0",
"nruns=2",
"likemin=4.0",
"boxsize=4",
"compress_flag=N",
"bkgima_flag=Y",
"bkgimages={}".format(outfile_backmap2[ii]),
"expima_flag=Y",
"detmask_flag=Y"
]
remove_file(outfile_boxlist3[ii])
print((" ").join(cmd))
os.system((" ").join(cmd))
save_ds9reg(outfile_boxlist3[ii])
outfile_backmap3.append("{}_BackMap3_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post))
cheese_mask="{}_CheeseMask3_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
if(do_erbackmap3==True):
""" back map 3 FINAL """
cmd=["erbackmap",
"image=%s" %(events[ii]),
"expimage=%s" %(expmaps[ii]),
"boxlist=%s" %(outfile_boxlist3[ii]),
"detmask=%s" %(detmask),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"bkgimage=%s" %(outfile_backmap3[ii]),
"cheesemask=%s" %(cheese_mask),
"idband=1",
"scut=0.001",
"mlmin=6",
"maxcut=0.5",
"fitmethod=smooth smoothval=15",
"snr=40.",
]
remove_file(cheese_mask)
remove_file(outfile_backmap3[ii])
os.system((" ").join(cmd))
print((" ").join(cmd))
# --- Maximum-likelihood detection (ermldet) + catalog prep, per observation ---
# NOTE(review): indentation was lost in this diff view; this section appears to
# be the tail of the per-observation driver runme(datakey) (see the commented
# "individual run" examples below) — confirm against the repository.
mllist="{}_MaxLikSourceList_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
srcmap="{}_SourceMap_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
# Build the eSASS ermldet command: ML-fit the box-detected candidates
# (boxlist3) against image/exposure/background maps for this band.
cmd=["ermldet",
"mllist=%s" %(mllist),
"boxlist=%s" %(outfile_boxlist3[ii]),
"images=%s" %(events[ii]),
"expimages=%s" %(expmaps[ii]),
"detmasks=%s" %(detmask),
"bkgimages=%s" %(outfile_backmap3[ii]),
"emin=%s" %(emin_ev[index]),
"emax=%s" %(emax_ev[index]),
"hrdef=",
"ecf={}".format(ecf[index]),
"likemin=5.",
"extlikemin=6.",
"compress_flag=N",
"cutrad=15.",
"multrad=20.",
"extmin=2.0",
"extmax=15.0",
#"bkgima_flag=Y", looks outdated
"expima_flag=Y",
"detmask_flag=Y",
"extentmodel=beta",
"thres_flag=N",
"thres_col=like",
"thres_val=30.",
"nmaxfit=4",
"nmulsou=2",
"fitext_flag=yes",
"srcima_flag=yes",
"srcimages=%s" %(srcmap)
]
if(do_ermldet==True):
test_exe('ermldet')
# Remove stale outputs first: ermldet will not overwrite existing files.
remove_file(mllist)
remove_file(srcmap)
os.system((" ").join(cmd))
print((" ").join(cmd))
# Region file scaled to arcsec (scale=60*60 converts deg -> arcsec).
save_ermldet_ds9reg(mllist,scale=60*60)
# catprep converts the ML list into the catalog format; catprep_en0 is the
# 0.3-2.3 keV reference catalog used by the astrometry steps below.
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,datakey), eband[index], outfile_post)
catprep_en0="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,datakey), eband[0], outfile_post)
if(do_catprep==True):
cmd=["catprep",
"infile={}".format(mllist),
"outfile={}".format(catprep),]
remove_file(catprep)
os.system((" ").join(cmd))
print((" ").join(cmd))
save_catprep_ds9reg(catprep,scale=60*60)
# Cross-match against the Gaia-unWISE reference catalog (5" error limit).
if(do_cross_match==True):
crossmatch_shu2019(catprep,dlmin=10,refimage=events[ii],crval=wcslist[datakey],
catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_UDS.fits.catalog",errlim=5.0)
# Astrometric correction is derived from the 0.3-2.3 keV band only ('0'),
# then applied to all bands via the saved shift/transform files.
if(do_astro_corr==True and eband[index]=='0'):
""" run astro_corr for 0.3-2.3 keV only """
wcs_astro_corr(catprep)
#wcs_match_ciao(catprep, method='rst',radius=12,residlim=0,residtype=0,residfac=1)
if(do_astro_update==True):
""" run astro_corr for 0.3-2.3 keV only """
attcorr=wcs_update_shift(events[ii],flog=catprep_en0.replace(".fits", ".shift.log"))
do_evtool_esass(evfile=attcorr,outfile=attcorr,rmlock=False, do_center=True, ra_cen=ra_cen, de_cen=de_cen)
if(do_wcs_match==True and eband[index]=='0'):
""" run wcs_match for 0.3-2.3 keV only """
wcs_match_ciao(catprep, method='trans',radius=12,residlim=5)
#wcs_match_ciao(catprep, method='rst',radius=12,residlim=0,residtype=0,residfac=1)
if(do_wcs_update==True):
""" use 0.3-2.3 keV transform matrix for all other bands """
attcorr=wcs_update_ciao(events[ii],crval=wcslist[datakey],transformfile=catprep_en0.replace(".fits", ".xfm"),clean=False)
do_evtool_esass(evfile=attcorr,outfile=attcorr,rmlock=False, do_center=True, ra_cen=ra_cen, de_cen=de_cen)
"""
# individual run, testing
runme("tm7_obs_1")
runme("tm5_obs_1")
runme("tm6_scan_1")
"""
# Drive runme() over every telescope-module/observation key, optionally in
# parallel with multiprocessing.Pool.
if(run_Pool==True):
# parallel run
items=[]
for tmkey in keylist_tm.keys():
for datakey in keylist_tm[tmkey]:
items.append(datakey)
with Pool() as pool:
pool.map(runme, items)
else:
# conventional run
for tmkey in keylist_tm.keys():
for datakey in keylist_tm[tmkey]:
print("--> {}".format(datakey))
runme(datakey)
#sys.exit()

458
scripts/04_mosaics.py Executable file
View File

@@ -0,0 +1,458 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
NAME:
04_mosaics.py
PURPOSE:
Builds mosaics in different energy bands.
USAGE:
esass
./04_mosaics.py
CONTROL:
Individual steps are toggled by boolean variables, e.g.: do_init = True
The selected energy band is controlled by the variable index
forced=True performs forced photometry
PARAMETERS:
index : Selected energy band
OUTPUT:
Output files are written to the directory outfile_dir
HISTORY:
Roman Krivonos, IKI RAS, krivonos@cosmos.ru
March 2023
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
#from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import pickle
import uds
from uds.utils import *
from uds.config import *
""" find UDS root dir """
#root_path=dirname(dirname(dirname(inspect.getfile(uds))))
"""
ftools does not like long file path names,
for this reason, we use relative path here
"""
root_path='..'
print("UDS root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
local_run = False
# Merged-mosaic key: "tm0" denotes all telescope modules combined.
outkey="tm0"
# --- Pipeline step toggles (three erbox/erbackmap iterations refine the
# background map before the final ML detection) ---
do_init = True
do_merge = True
do_detmask = True
do_expmap = True
do_erbox1 = True # local mode
do_erbackmap1 = True #
do_erbox2 = True # map mode, with background map
do_erbackmap2 = True #
do_erbox3 = True # map mode, with background map
do_erbackmap3 = True #
do_ersensmap = False
do_ermldet = True
do_fixcat = False # only for index=0
do_fixxmm = False # prepare forced photometry, only for index=0
do_apetool = False
do_catprep = True
do_filter_catalog = False
do_cross_match = False
# Selected energy band (indexes eband/emin_kev/emax_kev/ecf from uds.config).
index=3
forced=False
""" If forced=True, take input catalog from energy range en0 """
comm='' # for 4XMM-DR12 forced photometry use '-xmm'
vign=True
vignetting = 'vign' if (vign==True) else 'novign'
attcorr=True
events=[]
expmaps=[]
bkgmaps=[]
# Prepare per-observation event lists (and optionally exposure maps) for
# every telescope module / observation key.
for tmkey in keylist_tm.keys():
print("TM{} in work... init events".format(tmkey))
for datakey in keylist_tm[tmkey]:
print("--> {}".format(datakey))
""" Подготавливаем списки событий индивидуальных наблюдений """
outfile_evtool,outfile_expmap=init_events(key=datakey,attcorr=attcorr,
eband_index=eband[index],
infile_dir=infile_dir,
outfile_dir=outfile_dir,
do_init=do_init,
do_obsmode=False,
do_center=False,
do_evtool=True,
do_expmap=False,
vign=vign,
ra_cen=ra_cen, de_cen=de_cen,
emin_kev=emin_kev[index],
emax_kev=emax_kev[index])
events.append(outfile_evtool)
expmaps.append(outfile_expmap)
# Merge all per-observation event lists into one mosaic event file.
""" Собираем общий список событий """
outfile_evtool="{}_EventList_en{}.fits".format(os.path.join(outfile_dir,outkey),
eband[index])
if(do_merge==True):
do_evtool_esass(events=events, outfile=outfile_evtool)
""" makes detmask from TM exposures """
detmask="{}/{}_DetectorMask_en{}{}".format(outfile_dir,
outkey,
eband[index],
outfile_post)
if(do_detmask==True):
# minval=100: pixels below 100 s of exposure are masked out.
create_detmask_merged(expmaps,detmask,minval=100)
# Build the merged exposure map; note the factor 7 below — the exposure is
# scaled to 7 telescopes (translation of the Russian note that follows).
"""
Собираем общую карту экспозиции, обратите внимание на коэффициент 7.
Экспозиция рассчитывается на 7 телескопов.
outfile_expmap="{}_ExposureMap_en{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], vignetting)
outfile_bkgmap="{}_BackMap_en{}.{}.fits".format(os.path.join(outfile_dir,outkey), eband[index], vignetting)
"""
outfile_expmap="{}_ExposureMap_en{}.{}{}".format(os.path.join(outfile_dir,outkey),
eband[index],vignetting,
outfile_post)
if(do_expmap==True):
create_expmap_merged(expmaps,outfile_expmap,scale=7.0)
def _run_erbox(boxlist, backmap=None):
    """Run one eSASS erbox sliding-box detection pass on the merged mosaic.

    boxlist : output box-detection source list (removed first, since erbox
              does not overwrite existing files).
    backmap : background map for map-mode detection (bkgima_flag=Y); None
              selects local mode (bkgima_flag=N), used only for pass 1.
    Side effects: runs erbox via os.system and writes a ds9 region file.
    The command strings are identical to the three stanzas this helper
    replaces; only the background-map arguments vary between passes.
    """
    cmd=["erbox",
         "images=\'{}\'".format(outfile_evtool),
         "boxlist=%s" %(boxlist),
         "expimages=\'{}\'".format(outfile_expmap),
         "detmasks=\'{}\'".format(detmask),
         "emin=\'{}\'".format(emin_ev[index]),
         "emax=\'{}\'".format(emax_ev[index]),
         "ecf=\'{}\'".format(ecf[index]),
         "nruns=2",
         "likemin=6.0",
         "boxsize=4",
         "compress_flag=N",
         ]
    if backmap is None:
        cmd.append("bkgima_flag=N")
    else:
        cmd.append("bkgima_flag=Y")
        cmd.append("bkgimages={}".format(backmap))
    cmd.append("expima_flag=Y")
    cmd.append("detmask_flag=Y")
    remove_file(boxlist)
    print((" ").join(cmd))
    os.system((" ").join(cmd))
    save_ds9reg(boxlist)

# Pass 1: local-mode detection (no background map yet).
outfile_boxlist1="{}/{}_BoxList1_en{}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox1==True):
    _run_erbox(outfile_boxlist1)
""" Background map 1 """
outfile_backmap1="{}_BackMap1_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
cheese_mask="{}_CheeseMask1_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_erbackmap1==True):
    do_erbackmap_esass(outfile_evtool,outfile_expmap,outfile_boxlist1,detmask,emin_ev[index],emax_ev[index],
                       outfile_backmap1,cheese_mask)
# Pass 2: map mode using background map 1.
outfile_boxlist2="{}/{}_BoxList2_en{}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox2==True):
    _run_erbox(outfile_boxlist2, backmap=outfile_backmap1)
""" Background map 2 """
outfile_backmap2="{}_BackMap2_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
cheese_mask="{}_CheeseMask2_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_erbackmap2==True):
    do_erbackmap_esass(outfile_evtool,outfile_expmap,outfile_boxlist2,detmask,emin_ev[index],emax_ev[index],
                       outfile_backmap2,cheese_mask)
# Pass 3: map mode using background map 2.
outfile_boxlist3="{}/{}_BoxList3_en{}{}".format(outfile_dir,outkey, eband[index], outfile_post)
if(do_erbox3==True):
    _run_erbox(outfile_boxlist3, backmap=outfile_backmap2)
""" Background map 3 """
outfile_backmap3="{}_BackMap3_en{}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], vignetting, outfile_post)
cheese_mask="{}_CheeseMask3_en{}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], vignetting, outfile_post)
if(do_erbackmap3==True):
    # In forced mode reuse the en0 box list so the background map is built
    # from the same source positions as the reference band.
    boxlist3 = outfile_boxlist3 if(forced == False) else "{}/{}_BoxList3_en{}{}".format(outfile_dir,outkey, eband[0], outfile_post)
    do_erbackmap_esass(outfile_evtool,outfile_expmap,boxlist3,detmask,emin_ev[index],emax_ev[index],
                       outfile_backmap3,cheese_mask)
# --- ermldet setup: forced photometry (fixed positions from the en0 fixed
# catalog) vs. free detection.  The two branches set file names and the
# position/extent-fit flags consumed by the single cmd list below. ---
if(forced==True):
mllist="{}_MaxLikSourceList_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
srcmap="{}_SourceMap_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
""" for en1,2,3,6 give mllist from en0 as input """
#boxlist3="{}_MaxLikSourceList_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
#if(index==0):
boxlist3="{}_MaxLikSourceList_en{}.fixed{}{}".format(os.path.join(outfile_dir,outkey), eband[0], comm, outfile_post)
if not (os.path.exists(boxlist3)):
# The fixed en0 catalog must be produced first (do_fixcat run).
print("{} not found. Run do_fixcat=True, index=0, forced=False".format(boxlist3))
sys.exit()
add_specific_columns(boxlist3)
# Forced mode: freeze positions and extent, fit one source per candidate.
fitpos_flag="fitpos_flag=no"
fitext_flag="fitext_flag=no"
nmulsou = "nmulsou=1"
nmaxfit="nmaxfit=10"
multrad="multrad=15."
cutrad="cutrad=15."
if(index == 3 or index == 6):
""" for hard band take unvignetted background """
outfile_backmap3="{}_BackMap3_en{}.{}{}".format(os.path.join(outfile_dir,outkey), eband[index], "novign", outfile_post)
else:
mllist="{}_MaxLikSourceList_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
srcmap="{}_SourceMap_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
boxlist3 = outfile_boxlist3
# Free mode: fit positions and extent; allow source splitting (below).
fitpos_flag="fitpos_flag=yes"
fitext_flag="fitext_flag=yes"
nmulsou = "nmulsou=2"
nmaxfit="nmaxfit=4"
multrad="multrad=20."
cutrad="cutrad=20."
""" allow ermpldet to split sources (no more than two) """
cmd=["ermldet",
"mllist={}".format(mllist),
"boxlist=%s" %(boxlist3),
"images=\'{}\'".format(outfile_evtool),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"bkgimages=\'{}\'".format(outfile_backmap3),
"emin=\'{}\'".format(emin_ev[index]),
"emax=\'{}\'".format(emax_ev[index]),
"ecf=\'{}\'".format(ecf[index]),
"hrdef=",
"likemin=0.",
"extlikemin=5.",
"compress_flag=N",
cutrad,
multrad,
"extmin=2.0",
"extmax=35.0",
#"bkgima_flag=Y", looks outdated
"expima_flag=Y",
"detmask_flag=Y",
"shapelet_flag=yes",
"photon_flag=yes",
"extentmodel=beta",
"thres_flag=N",
"thres_col=like",
"thres_val=30.",
nmaxfit,
nmulsou,
fitpos_flag,
fitext_flag,
"srcima_flag=yes",
"srcimages=\'{}\'".format(srcmap)
]
# Sensitivity map + sky-area table at detection likelihood 10.
if(do_ersensmap==True):
detlike=10
create_sensmap(sensmap="{}_SensitivityMap_dl{}_en{}{}".format(os.path.join(outfile_dir,outkey), detlike,
eband[index], outfile_post),
areatab="{}_AreaTable_dl{}_en{}{}".format(os.path.join(outfile_dir,outkey),
detlike, eband[index], outfile_post),
expmap=outfile_expmap, backmap=outfile_backmap3,detlike=detlike,
detmask=detmask, emin=emin_ev[index], emax=emax_ev[index],ecf=ecf[index])
"""
detlike=6
create_sensmap(sensmap="{}_SensitivityMap_dl{}_en{}{}".format(os.path.join(outfile_dir,outkey), detlike,
eband[index], outfile_post),
areatab="{}_AreaTable_dl{}_en{}{}".format(os.path.join(outfile_dir,outkey),
detlike, eband[index], outfile_post),
expmap=outfile_expmap, backmap=outfile_backmap3,detlike=detlike,
detmask=detmask, emin=emin_ev[index], emax=emax_ev[index],ecf=ecf[index])
"""
if(do_ermldet==True):
test_exe('ermldet')
if(vign==False):
# ermldet photometry requires the vignetted exposure map.
print('Run ermldet with vignetted exposure!')
sys.exit()
remove_file(mllist)
remove_file(srcmap)
print(cmd)
runme(cmd, local_run=local_run)
print(cmd)
save_ermldet_ds9reg(mllist,scale=60*60,label='det_like')
save_ermldet_ds9reg(mllist,scale=60*60,label='id_src')
correct_fluxerr_ermldet_forced(mllist)
if(forced==True):
result = check_ermldet_forced(mllist)
# for a some reason, for an arbitrary energy band, ermldet break order of sources. Do this forced correction.
if(result == False):
correct_srcid_ermldet_forced(mllist)
# --- Manual catalog fixes: remove confused eUDS sources and inject their
# 4XMM counterparts at fixed positions (only valid for the en0, non-forced
# reference catalog). ---
if(do_fixcat==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
# ID_SRC values to drop; each is replaced by the 4XMM entries below.
srcs_remove=[341,446,346,96]
# name -> [RA, Dec, flux-scale]; trailing comments map back to removed IDs.
srcs_add = {'4XMM J021738.8-051257':[34.4117002, -5.2159135, 0.624],# 341
'4XMM J021733.8-051311':[34.3910215,-5.2199877,2.247],# 341
'4XMM J021929.4-051220':[34.8725460,-5.2056849,1.074],#446
'4XMM J021930.7-051225':[34.8782267,-5.2072112,0.624],# 446
'4XMM J021945.2-045331':[34.9383593,-4.8919843,1.538],#346
'4XMM J021929.4-043224':[34.8728586,-4.5400022,0.659555],#96
#'4XMM J021929.4-043224':[34.8728586,-4.5400022, 0.660],
#'4XMM J021831.8-050059':[34.6328841,-5.0163909,1.529],
#'4XMM J022131.1-050027':[35.3797879,-5.0075498,0.941],
#'4XMM J022129.5-045914':[35.3732136,-4.9874025,0.332],
#'4XMM J022026.3-050251':[35.1098619,-5.0476199,0.551],
#'4XMM J021925.4-042647':[34.8559099,-4.4465007,1.366],
#'4XMM J021910.9-045108':[34.7954311,-4.8522901,0.898],
#'4XMM J021945.2-045331':[34.9383593,-4.8919843,1.538],
#'4XMM J021733.8-051311':[34.3910215,-5.2199877,2.247],
}
fix_catalog(mllist=mllist,refimage=outfile_evtool, srcs_remove=srcs_remove, srcs_add=srcs_add)
"""
Note that fix_catalog added ID_SRC to each XMM source.
Next, we save forced XMM sources (with new ID_SRC!) for later catalog compilation
"""
with open(mllist.replace(".fits", ".xmm.pickle"), 'wb') as f:
pickle.dump(srcs_add, f)
# Same constraints as do_fixcat: en0 reference, non-forced only.
if(do_fixxmm==True):
if not index == 0:
print("ERROR: You can fix only reference catalog for en0.")
sys.exit()
if forced == True:
print("ERROR: You can fix only non-forced catalog for en0.")
sys.exit()
fix_xmm_sources(mllist=mllist,refimage=outfile_evtool, xmm_catalog='../data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog')
# --- Aperture photometry (apetool) on the ermldet list, reusing a
# previously computed PSF map (psfmapflag=no). ---
if(do_apetool==True):
psfmap="{}_PsfMap{}".format(os.path.join(outfile_dir,outkey), outfile_post)
#remove_file(psfmap)
#cmd=["apetool",
# "images=\'{}\'".format(outfile_evtool),
# "psfmaps=\'{}\'".format(psfmap),
# "psfmapflag=yes",]
#runme(cmd, local_run=local_run)
cmd=["apetool",
"mllist={}".format(mllist),
"apelistout={}".format(mllist), # give the same file
"images=\'{}\'".format(outfile_evtool),
"expimages=\'{}\'".format(outfile_expmap),
"detmasks=\'{}\'".format(detmask),
"bkgimages=\'{}\'".format(outfile_backmap3),
"emin=\'{}\'".format(emin_ev[index]),
"emax=\'{}\'".format(emax_ev[index]),
"srcimages=\'{}\'".format(srcmap),
"psfmaps={}".format(psfmap),
"psfmapflag=no",
"stackflag=no",
"apexflag=yes",
"apesenseflag=no",
"eefextract=0.65",
"cutrad=15",
"eindex=1",]
runme(cmd, local_run=local_run)
# Convert the ML list into the source catalog (forced runs get a distinct
# ".forced" file name).
if(forced==True):
catprep="{}_SourceCatalog_en{}.forced{}{}".format(os.path.join(outfile_dir,outkey), eband[index], comm, outfile_post)
else:
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
if(do_catprep==True):
cmd=["catprep",
"infile={}".format(mllist),
"outfile={}".format(catprep),]
remove_file(catprep)
runme(cmd, local_run=local_run)
# Keep only well-exposed, significant sources ("bright" selection).
if(do_filter_catalog==True):
#filter_mllist(mllist,expcut=5000.0,dlcut=10.0,dlmin=10,dlmax=10000)
""" works the same """
filter_catprep(catprep,expcut=5000.0,dlmin=10,dlmax=10000,outkey='bright')
#filter_catprep(catprep,expcut=5000.0,dlmin=6,dlmax=10,outkey='faint')
if(do_cross_match==True):
crossmatch_shu2019(catprep,dlmin=10,refimage=outfile_evtool,crval=[ra_cen, de_cen],
catalog=root_path+"/data/Gaia_unWISE/Gaia_unWISE_UDS.fits.catalog")

286
scripts/05_srctool.py Executable file
View File

@@ -0,0 +1,286 @@
#!/usr/bin/env python
"""
NAME:
05_srctool.py
PURPOSE:
Runs srctool for the widest channel 0.2-10 keV, so that the spectra have the fullest possible energy coverage. The source list is taken from 0.3-2.3 keV.
USAGE:
esass
./05_srctool.py
CONTROL:
Requires running the previous script 04_mosaics.py
PARAMETERS:
index=4 : Selected energy band
OUTPUT:
Output files are written to the directory outfile_dir/srctool_dir
HISTORY:
Roman Krivonos, IKI RAS, krivonos@cosmos.ru
March 2023
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import pickle
import uds
from uds.utils import *
from uds.config import *
from uds.sherpa import *
""" find UDS root dir """
#root_path=dirname(dirname(dirname(inspect.getfile(uds))))
"""
ftools does not like long file path names,
for this reason, we use relative path here
"""
root_path='..'
print("UDS root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
srctool_dir="{}/{}".format(outfile_dir,"srctool-products")
create_folder(srctool_dir)
outkey="tm0"
outfile_srctool="{}_SrcTool_".format(outkey)
# --- Step toggles: spectral extraction, grouping, ECF/flux calibration,
# catalog compilation and the various cross-match/export products. ---
do_init = False
do_merge = False
do_srctool = True
do_grppha = False
do_ecf_calc = False # for all bands
do_ecf_print = False # for all bands
do_flux_calc = False # for all bands
do_catalog = False
do_extended = False
do_ds9reg = False
do_euds_final = False
do_euds_dr12 = False # crossmatch eUDS with DR12
do_euds_stat = False
do_euds_cds = False
do_xmm_catalog = False
do_xmm_final = False
do_xmm_xmatch = False
do_xmm_ds9reg = False
do_xmm_cds = False
do_euds_cosmatch = False
do_cross_check = False # Check whether all E3,E6 sources are detected in E0
# Band 5 is the wide 0.2-10 keV channel (see note below, in Russian:
# "so that the spectra cover all energies we work in band 5").
index=5
""" чтобы спектры покрывали все энергии работаем в диапазоне 5 """
vign=True
vignetting = 'vign' if (vign==True) else 'novign'
events=[]
expmaps=[]
bkgmaps=[]
# Prepare (or just name) the per-observation event lists.
for tmkey in keylist_tm.keys():
print("TM{} in work... init events".format(tmkey))
for datakey in keylist_tm[tmkey]:
print("--> {}".format(datakey))
""" Подготавливаем списки событий индивидуальных наблюдений """
outfile_evtool,outfile_expmap=init_events(key=datakey,attcorr=True,
eband_index=eband[index],
infile_dir=infile_dir,
outfile_dir=outfile_dir,
do_init=do_init,
do_obsmode=False,
do_center=False,
do_evtool=False,
do_expmap=False,
vign=vign,
ra_cen=ra_cen, de_cen=de_cen,
emin_kev=emin_kev[index],
emax_kev=emax_kev[index])
events.append(outfile_evtool)
expmaps.append(outfile_expmap)
# Merge all event lists into one mosaic event file.
""" Собираем общий список событий """
outfile_evtool="{}_EventList_en{}.fits".format(os.path.join(outfile_dir,outkey),
eband[index])
if(do_merge==True):
do_evtool_esass(events=events, outfile=outfile_evtool)
suffix_srctool=".fits"
""" Output filename suffix - all output filenames appended with this string.
If suffix contains no filename extension (does not contain a "."), then ".fits"
is also appended to the filename. """
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[0], outfile_post)
""" take source catalog from 0.3-2.3 keV band """
if not (os.path.isfile(catprep)==True):
print("{} not found, run 04_mosaics.py?".format(catprep))
sys.exit()
# Extract spectra, RMFs and ARFs for TMs 1,5,6,7 with automatic source and
# background regions.
if(do_srctool==True):
test_exe('srctool')
cmd=['srctool',
"todo=\'SPEC RMF ARF\'",
"insts=\'1 5 6 7\'",
"eventfiles={}".format(outfile_evtool),
"prefix=\'{}\'".format(os.path.join(srctool_dir,outfile_srctool)),
"suffix=\'{}\'".format(suffix_srctool),
"srccoord={}".format("../products/eUDS_for_srctool_test.fits"),
# the same as original file eUDS.fits, but changed ML_CTS --> ML_CTS_0, ML_BKG --> ML_BKG_0, ML_EXP --> ML_EXP_0
#"srcreg=\'fk5;circle * * 60s\'",
#"backreg=\'fk5;annulus * * 90s 120s\'",
"srcreg=AUTO",
"backreg=AUTO",
"clobber=yes",]
#print((" ").join(cmd))
#os.system((" ").join(cmd))
#print((" ").join(cmd))
runme(cmd, local_run=True)
if(do_grppha==True):
group_spectra("{}/*020_SourceSpec_*.fits".format(srctool_dir))
# Energy-conversion factors derived from the extracted ARFs (simulation
# based, simnum draws).
ecfout="{}_SampleFlux_v1.pickle".format(os.path.join(outfile_dir,outkey))
if(do_ecf_calc==True):
calc_ecf("{}/tm0_SrcTool_020_ARF_?????.fits".format(srctool_dir),
catprep=catprep, emin=emin_kev, emax=emax_kev, eband=eband, outfile=ecfout, simnum=10000)
if(do_ecf_print==True):
print_ecf(infile=ecfout, emin=emin_kev, emax=emax_kev, eband=eband, skipfrac=10.0)
fluxout="{}_SherpaFlux.pickle".format(os.path.join(outfile_dir,outkey))
if(do_flux_calc==True):
# NOTE(review): writes to ecfout, not fluxout — fluxout appears unused;
# confirm whether outfile=ecfout is intentional.
calc_flux("{}/tm0_SrcTool_020_ARF_?????.fits".format(srctool_dir),
catprep=catprep, emin=emin_kev, emax=emax_kev, eband=eband, outfile=ecfout, simnum=100)
#index=0
catprep="{}_SourceCatalog_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
rawcat="{}_SourceCatalog_en{}.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
# Compile the raw eUDS catalog from the forced per-band catalogs and the
# sensitivity maps, re-attaching the forced XMM sources saved by 04_mosaics.
if(do_catalog==True):
forced_xmm_sources="{}_MaxLikSourceList_en{}.xmm.pickle".format(os.path.join(outfile_dir,outkey), eband[index])
with open(forced_xmm_sources, 'rb') as f:
print("Reading forced XMM sources from {}".format(forced_xmm_sources))
srcs_forced = pickle.load(f)
print()
print(srcs_forced)
print()
make_euds_catalog(infile='../products/tm0_SourceCatalog_en0.forced.fits', rawcat=rawcat, dlmin=10.0, dlmax=100000, ext_like=1000,
emin=emin_kev[index], emax=emax_kev[index], eband=eband[index],
infile_en00cat=catprep,
infile_en01cat='../products/tm0_SourceCatalog_en1.forced.fits',
infile_en02cat='../products/tm0_SourceCatalog_en2.forced.fits',
infile_en03cat='../products/tm0_SourceCatalog_en3.forced.fits',
infile_en06cat='../products/tm0_SourceCatalog_en6.forced.fits',
infile_en00sens='../products/tm0_SensitivityMap_dl10_en0.fits',
infile_en01sens='../products/tm0_SensitivityMap_dl10_en1.fits',
infile_en02sens='../products/tm0_SensitivityMap_dl10_en2.fits',
infile_en03sens='../products/tm0_SensitivityMap_dl10_en3.fits',
infile_en06sens='../products/tm0_SensitivityMap_dl10_en6.fits',
srcs_forced=srcs_forced,
)
if(do_extended==True):
make_extended(infile=rawcat,outreg="{}_ExtendedCat_en{}.reg".format(os.path.join(outfile_dir,outkey), eband[index]))
if(do_ds9reg==True):
#make_final_ds9reg(infile=rawcat,outreg="{}_FinalCat_dl10.reg".format(os.path.join(outfile_dir,outkey)))
make_final_ds9reg(infile=rawcat,scale=(60*60)/10,outreg="{}_FinalCat_dl10_talk.reg".format(os.path.join(outfile_dir,outkey)))
if(do_euds_final==True):
""" make final eUDS catalog """
final_euds_catalog(infile=rawcat, outfile_fits='../products/eUDS.fits')
if(do_euds_dr12==True):
crossmatch_dr12('../products/eUDS.fits', catalog=root_path+"/data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog", devmax=15)
if(do_euds_stat==True):
make_euds_stat(infile="../products/eUDS.fits",fluxlim=5e-14)
if(do_euds_cds==True):
make_euds_cds(infile="../products/eUDS.fits",outfile='../products/eUDS.cds')
if(do_cross_check==True):
""" cross check final eUDS catalog """
cross_check_euds(infile=catprep, euds='../products/eUDS.fits', outkey="../products/en{}_FinalCat_dl10".format(index))
# --- XMM-forced catalog products ---
if(do_xmm_catalog==True):
""" complile raw forced XMM catalog """
make_xmm_catalog(infile_en00cat='../products/tm0_SourceCatalog_en0.forced-xmm.fits',
infile_en01cat='../products/tm0_SourceCatalog_en1.forced-xmm.fits',
infile_en02cat='../products/tm0_SourceCatalog_en2.forced-xmm.fits',
infile_en03cat='../products/tm0_SourceCatalog_en3.forced-xmm.fits',
infile_en06cat='../products/tm0_SourceCatalog_en6.forced-xmm.fits',
forced_xmm_sources='../products/tm0_MaxLikSourceList_en0.fixed-xmm.pickle',
outfile='../products/tm0_4XMM-DR12.pickle')
if(do_xmm_final==True):
""" make final XMM-forced catalog """
final_xmm_catalog(infile='../products/tm0_4XMM-DR12.pickle', outfile_fits='../products/eUDS_4XMM-DR12.fits')
if(do_xmm_xmatch==True):
""" cross-match XMM-forced catalog
outfile_cvs contains 0.3-2.3 keV eUDS flux (col1=flux,col2=err) vs. 4XMM-DR12 flux (col3=flux, col4=err)
XMM flux was converted from 0.2-2.0 keV to 0.3-2.3 keV using wabs*powerlow with wabs=0.02 gamma=2.0
"""
final_xmm_xmatch(infile='../products/eUDS_4XMM-DR12.fits',
xmmslim='../data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog',
xmmfull='../data/4XMM-DR12/4XMM_DR12cat_v1.0_UDS.fits.catalog',
xmmlim=2e-14,
outfile_flux="../products/eUDS_4XMM-DR12.flux.csv")
if(do_xmm_ds9reg==True):
""" show XMM-forced catalog """
make_xmm_ds9reg_confused(infile='../products/eUDS_4XMM-DR12.fits', outfile='../products/eUDS_4XMM-DR12.confused.reg')
""" obsolete """
if(do_xmm_cds==True):
make_xmm_cds(infile="../products/eUDS_4XMM-DR12.fits", outfile='../products/eUDS_4XMM-DR12.cds')
if(do_euds_cosmatch==True):
""" prepare eUDS catalog for CosMatch (Mescheryakov) """
make_euds_cosmatch(infile='../products/eUDS.fits', outfile='../products/eUDS-CosMatch.fits')

445
scripts/06_plot.py Executable file
View File

@@ -0,0 +1,445 @@
#!/usr/bin/env python
"""
NAME:
05_srctool.py
(NOTE(review): header appears copy-pasted from 05_srctool.py — per the
repository listing this file is scripts/06_plot.py, which produces the
diagnostic plots; confirm and update.)
PURPOSE:
Runs srctool for the widest channel 0.2-10 keV, so that the spectra have the fullest possible energy coverage. The source list is taken from 0.3-2.3 keV.
USAGE:
esass
./05_srctool.py
CONTROL:
Requires running the previous script 04_mosaics.py
PARAMETERS:
index=4 : Selected energy band
OUTPUT:
Output files are written to the directory outfile_dir/srctool_dir
HISTORY:
Roman Krivonos, IKI RAS, krivonos@cosmos.ru
March 2023
"""
from astropy.wcs import WCS
from astropy.io import fits
import sys, os, os.path, time, subprocess
from pathlib import Path
import numpy as np
import glob
from os.path import dirname
import inspect
import uds
from scipy.stats import norm
import matplotlib.pyplot as plt
import pandas as pd
from uds.utils import *
from uds.config import *
from uds.sherpa import *
""" find UDS root dir """
#root_path=dirname(dirname(dirname(inspect.getfile(uds))))
"""
ftools does not like long file path names,
for this reason, we use relative path here
"""
root_path='..'
print("UDS root path: {}".format(root_path))
infile_dir=root_path+'/data/processed'
outfile_dir=root_path+'/products'
create_folder(outfile_dir)
srctool_dir="{}/{}".format(outfile_dir,"srctool-products")
create_folder(srctool_dir)
outkey="tm0"
outfile_srctool="{}_SrcTool_".format(outkey)
# --- Plot toggles: each block below produces one figure/statistic ---
do_flux_distr = False
do_sens_curve = False
do_4xmm_ratio = False
do_euds_radec_err = False
do_euds_dr12_diff = False
do_euds_dr12_stat = True
do_print_ecf = False
index=0
# Selected-source CSV produced by the 05_srctool catalog steps.
catalog = "{}_SourceCatalog_en{}.main.selected.csv".format(os.path.join(outfile_dir,outkey), eband[0])
if not (os.path.isfile(catalog)==True):
print("{} not found, run 05_srctool.py?".format(catalog))
sys.exit()
def _plot_log_hist(data, logbins, xlabel, png):
    """Draw a log-log step histogram of `data` using precomputed `logbins`
    and save it to `png`.  Replaces two byte-identical plotting stanzas;
    styling (spine width, tick size, colors) is unchanged."""
    fig, ax = plt.subplots()
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    plt.hist(data, bins=logbins, histtype='step', color='blue', linewidth=1, linestyle='solid')
    plt.xlabel(xlabel,fontsize=14, fontweight='normal')
    plt.ylabel('Number',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('log')
    plt.yscale('log')
    plt.savefig(png, bbox_inches='tight')
    plt.close(fig)

if(do_flux_distr==True):
    # Count-rate distribution of the selected catalog sources.
    data, logbins, mean, median = get_log_distr(infile=catalog, field='ml_rate', minval=1e-3, maxval=2)
    _plot_log_hist(data, logbins, 'Count rate (counts s$^{-1}$)',
                   catalog.replace("main.selected.csv", "ml_rate.png"))
    # Energy-flux distribution of the selected catalog sources.
    data, logbins, mean, median = get_log_distr(infile=catalog, field='ml_flux', minval=1e-15, maxval=2e-12)
    _plot_log_hist(data, logbins, 'Energy flux (erg s$^{-1}$ cm$^{-2}$)',
                   catalog.replace("main.selected.csv", "ml_flux.png"))
if(do_sens_curve==True):
    # Flux conversion factor 0.3-2.3 keV -> 0.5-2 keV for an absorbed power
    # law (wabs nH=0.02, Gamma=2.0); both model fluxes come from the XSPEC
    # session quoted in the triple-quoted block below.
    coeff = 2.4336e-13 / 3.4012e-13 # see below
    # Sensitivity curve at detection likelihood 10.
    areatab="{}_AreaTable_dl10_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
    hdul = fits.open(areatab)
    tbdata = hdul[1].data
    """ convert limflux from 0.3-2.3 keV to 0.5-2 keV """
    limflux_dl10 = tbdata['LIMFLUX']*coeff
    area_dl10 = tbdata['SKY_AREA']
    hdul.close()
    # Sensitivity curve at detection likelihood 6.
    areatab="{}_AreaTable_dl6_en{}{}".format(os.path.join(outfile_dir,outkey), eband[index], outfile_post)
    hdul = fits.open(areatab)
    tbdata = hdul[1].data
    """ convert limflux from 0.3-2.3 keV to 0.5-2 keV """
    limflux_dl6 = tbdata['LIMFLUX']*coeff
    area_dl6 = tbdata['SKY_AREA']
    hdul.close()
    fig, ax = plt.subplots()
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    ax.set_xlim([3e-16,1e-13])
    ax.set_ylim([0.3,160])
    plt.plot(limflux_dl10,area_dl10, color="black", linestyle='solid',label="eUDS (DL10)")
    plt.plot(limflux_dl6,area_dl6, color="black", linestyle='dashed', label="eUDS (DL6)")
    # Literature survey curves for comparison.
    # BUGFIX: this file imports "pandas as pd" (top of script); the original
    # called pandas.read_csv, which raises NameError unless a wildcard
    # import happens to re-export "pandas".  Use the explicit alias.
    df = pd.read_csv("../data/surveys/eFEDS.dat",header=None,names=['limflux','area'])
    plt.plot(df['limflux'],df['area'], color="brown", linestyle='solid',label="eFEDS (DL6)")
    df = pd.read_csv("../data/surveys/cosmos-legacy.dat",header=None,names=['limflux','area'])
    plt.plot(df['limflux'],df['area'], color="blue", linestyle='solid',label="COSMOS Legacy")
    df = pd.read_csv("../data/surveys/CDWFS.dat",header=None,names=['limflux','area'])
    plt.plot(df['limflux'],df['area'], color="green", linestyle='solid', label="CDWFS")
    df = pd.read_csv("../data/surveys/XMM-RM.dat",header=None,names=['limflux','area'])
    plt.plot(df['limflux'],df['area'], color="magenta", linestyle='solid',label="XMM-RM")
    df = pd.read_csv("../data/surveys/XMM-XXL-N.dat",header=None,names=['limflux','area'])
    plt.plot(df['limflux'],df['area'], color="red", linestyle='solid',label="XMM-XXL-N")
    ax.legend()
    #plt.hist(data, bins=logbins, histtype='step', color='blue', linewidth=1, linestyle='solid')
    plt.xlabel('Limiting flux (0.5-2 keV, erg s$^{-1}$ cm$^{-2}$)',fontsize=14, fontweight='normal')
    plt.ylabel('Area (deg$^{2}$)',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('log')
    plt.yscale('log')
    png="{}_AreaTable_en{}.png".format(os.path.join(outfile_dir,outkey), eband[index])
    plt.savefig(png, bbox_inches='tight')
    plt.close(fig)
"""
========================================================================
Model TBabs<1>*powerlaw<2> Source No.: 1 Active/On
Model Model Component Parameter Unit Value
par comp
1 1 TBabs nH 10^22 2.00000E-02 frozen
2 2 powerlaw PhoIndex 2.00000 frozen
3 2 powerlaw norm 1.14851E-04 +/- 3.83988E-06
________________________________________________________________________
Model Flux 0.00028334 photons (3.4012e-13 ergs/cm^2/s) range (0.30000 - 2.3000 keV)
Model Flux 0.0001619 photons (2.4336e-13 ergs/cm^2/s) range (0.50000 - 2.0000 keV)
"""
if(do_4xmm_ratio==True):
    # Distribution of the eUDS / 4XMM-DR12 flux ratio.
    filename="../products/eUDS_4XMM-DR12.flux.csv"
    data, logbins, mean, median = get_log_distr(infile=filename, field='ratio', minval=8e-2, maxval=60, nbin=60)
    print("Median {}".format(median))
    print(" Mean {}".format(mean))
    print("Ntotal {}".format(len(data)))
    fig, ax = plt.subplots()
    #plt.figure(figsize=(5,5))
    #plt.figure().set_figheight(3.6)
    #plt.rcParams['figure.figsize'] = [4, 4]
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    plt.hist(data, bins=logbins, histtype='step', color='blue', linewidth=1, linestyle='solid')
    plt.axvline(x = median, color = 'black', label = 'Median value', linestyle='dashed')
    plt.xlabel('Energy flux ratio',fontsize=14, fontweight='normal')
    plt.ylabel('Number',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('log')
    plt.yscale('log')
    plt.savefig(filename.replace(".csv", ".distr.png"), bbox_inches='tight')
    plt.close(fig)
    # Flux-flux scatter plot with 1:1 (solid), x10 (dashed) and /10 (dotted)
    # guide lines.  BUGFIX: pandas -> pd, same reason as above.
    df = pd.read_csv(filename)
    fig, ax = plt.subplots()
    #plt.rcParams['figure.figsize'] = [4, 4]
    #plt.figure(figsize=(5,5))
    #plt.figure().set_figheight(4)
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    plt.plot(np.array([1e-17,1e-11]), np.array([1e-17,1e-11]), linestyle = 'solid', color='black')
    plt.plot(np.array([1e-17,1e-11]), 10*np.array([1e-17,1e-11]), linestyle = 'dashed', color='black')
    plt.plot(np.array([1e-17,1e-11]), 0.1*np.array([1e-17,1e-11]), linestyle = 'dotted', color='black')
    #plt.plot(np.array([1e-17,1e-11]), median*np.array([1e-17,1e-11]), linestyle = 'dashed', color='blue')
    plt.errorbar(df['dr12_flux'],df['euds_flux'], yerr=df['euds_flux_err'], xerr=df['dr12_flux_err'], linestyle='None', color='black', label='Errors')
    plt.plot(df['dr12_flux'],df['euds_flux'], marker="o", linewidth=1, linestyle='None', markerfacecolor='Gold',markeredgecolor="black",)
    #plt.axvline(x = median, color = 'black', label = 'Median value', linestyle='dashed')
    plt.xlabel('4XMM-DR12 Energy flux (erg s$^{-1}$ cm$^{-2}$)',fontsize=14, fontweight='normal')
    plt.ylabel('eUDS Energy flux (erg s$^{-1}$ cm$^{-2}$)',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xlim(2e-16,1e-12)
    plt.ylim(1e-15,2e-12)
    plt.xscale('log')
    plt.yscale('log')
    plt.savefig(filename.replace(".csv", ".png"), bbox_inches='tight')
    plt.close(fig)
# --- Positional-error diagnostics of the eUDS catalogue ---
if(do_euds_radec_err==True):
    filename="../products/eUDS.fits"
    ecat={}  # NOTE(review): never populated or read in this section -- apparently dead
    hdul = fits.open(filename)
    etab = hdul[1].data
    hdul.close()
    # Collect DET_LIKE vs RADEC_ERR for point-like sources without a DR12 name.
    x=[]
    y=[]
    for s in etab:
        # Skip sources carrying a 4XMM counterpart name ...
        if ("XMM" in s['DR12_IAU_NAME']):
            print("Skip ",s['DR12_IAU_NAME'])
            continue
        # ... and extended sources.
        if (s['EXT_LIKE']>0.0):
            print("Skip extended ",s['ID_SRC'])
            continue
        x.append(s['DET_LIKE'])
        y.append(s['RADEC_ERR'])
    # Log-log scatter of positional error against detection likelihood.
    fig, ax = plt.subplots()
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    ax.set_xlim([10,10000])
    ax.set_ylim([0.1,40])
    plt.plot(x,y,".", color="gold", markersize=12, markeredgewidth=1.5, markeredgecolor="black")
    plt.xlabel('DET_LIKE',fontsize=14, fontweight='normal')
    plt.ylabel('RADEC_ERR (arcsec)',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('log')
    plt.yscale('log')
    png="../products/en0_radec_err.png"
    plt.savefig(png, bbox_inches='tight')
    plt.close(fig)
    # Multiplicity of eUDS->DR12 cross-matches: count how many rows of the
    # cross-match table share each eUDS source ID.
    stat={}
    hdul = fits.open("../products/eUDS.dr12.cross.fits")
    tab = hdul[1].data
    hdul.close()
    print(len(np.unique(tab['ID'])))
    for rec in tab:
        sid=rec['ID']
        if sid in stat.keys():
            stat[sid]=stat[sid]+1
        else:
            stat[sid]=1
    # Tally 1-, 2- and 3-fold matches (4+ are not counted separately).
    unique=0
    double=0
    triple=0
    for key in stat.keys():
        if(stat[key]==1):
            unique=unique+1
        if(stat[key]==2):
            double=double+1
        if(stat[key]==3):
            triple=triple+1
    print("unique: {}, double: {}. triple: {}".format(unique, double, triple))
# --- Normalized flux difference eUDS vs 4XMM-DR12 (Brunner-style) ---
if(do_euds_dr12_diff==True):
    # read forced first
    # fieldnames = ['euds_det_like', 'euds_flux', 'euds_flux_err', 'dr12_flux', 'dr12_flux_err', 'ratio']
    filename="../products/eUDS_4XMM-DR12.flux.csv"
    df = pandas.read_csv(filename)  # NOTE(review): df is never used below
    # calculate difference similar to Bruner
    threshold=5e-14
    # forced catalog contains CONF flag, take it to remove from histogram
    hdul = fits.open("../products/eUDS_4XMM-DR12.fits")
    forced = hdul[1].data
    fcat={}
    for rec in forced:
        key=rec['DR12_SRCID']
        fcat[key]={'conf':rec['CONF'],}
    # NOTE(review): fcat is built but never consulted below, so the CONF
    # filtering announced above is not actually applied to the histograms.
    # read cross-match results (not forced)
    hdul = fits.open('../products/eUDS.dr12.cross.fits')
    tb = hdul[1].data
    cross_diff=[]
    cross_ratio=[]
    mean=0.0
    count=0
    for ind,s in enumerate(tb):
        # NOTE(review): 'rec' here is the stale loop variable left over from
        # the fcat loop above -- s['DR12_SRCID'] was presumably intended;
        # 'key' is unused below in any case.
        key=rec['DR12_SRCID']
        #if not (tb[ind]['dr12_flux']>threshold and tb[ind]['src_flux']>threshold):
        if not (tb[ind]['dr12_flux']>threshold):
            continue
        # Aperture-photometry flux alternative (only used via the
        # commented-out assignment below). NOTE(review): 'ecf' and 'index'
        # are not defined in this section and must come from earlier in the
        # file; 'index' may be a typo for 'ind' -- verify before use.
        ape_flux=(tb[ind]['ape_cts']-tb[ind]['ape_bkg'])/tb[ind]['ape_exp']/ecf[index]
        cross_dr12_flux=(tb[ind]['dr12_flux'])
        cross_dr12_flux_error=(tb[ind]['dr12_flux_error'])
        cross_euds_flux=(tb[ind]['src_flux'])
        #cross_euds_flux=ape_flux
        cross_euds_flux_error=(tb[ind]['src_flux_error'])
        # Debug output for each accepted source.
        print(cross_euds_flux,cross_euds_flux_error,tb[ind]['ape_cts'],tb[ind]['det_like'])
        # Flux difference in units of the combined 1-sigma error.
        d=(cross_dr12_flux - cross_euds_flux) / np.sqrt(cross_dr12_flux_error**2 + cross_euds_flux_error**2)
        #if(d < -15):
        # continue
        r=cross_euds_flux/cross_dr12_flux
        cross_diff.append(d)
        cross_ratio.append(r)
        #if(d < -15.0):
        #print("GGG",d,r,cross_euds_flux,cross_dr12_flux)
        #print(tb[ind]['DR12_NAME'],tb[ind]['DR12_RA'],tb[ind]['DR12_DEC'])
        mean=mean + (cross_euds_flux/cross_dr12_flux)
        count=count+1
    # Summary statistics of the normalized difference and the raw ratio.
    print("min={:.2f}, max={:.2f}, mean={:.2f}, median={:.2f}, std={:.2f} N={}".format(min(cross_diff),
                                                                                      max(cross_diff),
                                                                                      np.mean(cross_diff),
                                                                                      np.median(cross_diff),
                                                                                      np.std(cross_diff),
                                                                                      len(cross_diff)))
    print("ratio mean",mean/count)
    print("ratio median",np.median(cross_ratio))
    # Histogram of the normalized flux difference.
    nbin=50
    bins=np.linspace(-30,30, nbin)
    fig, ax = plt.subplots()
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    plt.hist(cross_diff, bins=bins,histtype='step', color='green', linewidth=1, linestyle='solid', density=True)
    #x = np.arange(-10, 10, 0.001)
    #plot normal distribution with mean 0 and standard deviation 1
    #plt.plot(x, norm.pdf(x, 0, 1), color='red', linewidth=2)
    plt.ylabel('Relative fraction',fontsize=14, fontweight='normal')
    plt.xlabel('(F$_{XMM}$-F$_{eUDS}$)/$\sqrt{\Delta F_{XMM}^{2}+\Delta F_{eUDS}^{2}}$',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('linear')
    plt.yscale('linear')
    ax.set_xlim([-31, 31])
    ax.set_ylim([0, 0.2])
    plt.savefig("../products/cross-match_diff.png", bbox_inches='tight')
    plt.close(fig)
    # Histogram of the raw eUDS/DR12 flux ratio.
    nbin=200
    bins=np.linspace(-1,1.0, nbin)
    fig, ax = plt.subplots()
    for axis in ['top','bottom','left','right']:
        ax.spines[axis].set_linewidth(1)
    ax.tick_params(axis="both", width=1, labelsize=14)
    plt.hist(cross_ratio, bins=bins, histtype='step', color='green', linewidth=1, linestyle='solid', density=False)
    #x = np.arange(-10, 10, 0.001)
    #plot normal distribution with mean 0 and standard deviation 1
    #plt.plot(x, norm.pdf(x, 0, 1), color='red', linewidth=2)
    plt.xlabel('eUDS and 4XMM-DR12 flux ratio',fontsize=14, fontweight='normal')
    plt.ylabel('Relative fraction',fontsize=14, fontweight='normal')
    plt.grid(visible=True)
    plt.xscale('linear')
    plt.yscale('linear')
    plt.savefig("../products/cross-match_ratio.png", bbox_inches='tight')
    plt.close(fig)
# Print the energy conversion factors stored in the pickled ECF lookup
# table: first the five eUDS bands, then the two 4XMM-DR12 bands used for
# the flux comparison.
if(do_print_ecf==True):
    filename='../data/ECF/ecf_tbabspow_g2nh0.02.pkl'
    with open(filename, 'rb') as f:
        ecf_table = pickle.load(f)
    # eUDS bands: 0.3-2.3 (main), 0.3-0.6, 0.6-2.3, 2.3-5, 5-8 keV
    for band in ((0.3, 2.3), (0.3, 0.6), (0.6, 2.3), (2.3, 5.0), (5.0, 8.0)):
        print(ecf_table[band])
    print()
    # 4XMM-DR12 EP2 (0.5-1) and EP3 (1-2) bands
    for band in ((0.5, 1.0), (1.0, 2.0)):
        print(ecf_table[band])
# --- Flux-limit statistics of forced photometry vs the DR12 slim catalog ---
if(do_euds_dr12_stat==True):
    # dr12_stat() is a project helper defined elsewhere; writes a ds9
    # region file with the flux-limited selection.
    dr12_stat(infile='../products/eUDS_4XMM-DR12.fits',
              xmmslim='../data/4XMM-DR12/4XMM_DR12cat_slim_v1.0_UDS.fits.catalog',
              xmmlim=None,
              outfile_reg='../products/dr12_fluxlim.reg')

44
scripts/README.md Normal file
View File

@@ -0,0 +1,44 @@
Подготовка рабочего окружения:
```
source <MY PATH>/venv/bin/activate.csh
source <MY PATH>/eSASS4EDR/bin/esass-init.csh
```
### 01_init_events.py
Создает начальные списки событий и помещает их в ```uds/data/processed```
Оригинальные файлы со списками событий задаются в файлах ```uds/data/evtlists/*.txt```
### 02_merge_events.py
Создает объединённый список событий и помещает его в ```uds/products```. Этот список событий нужен, в основном, для извлечения спектров с помощью ```srctool```.
Попутно этот скрипт унифицирует оригинальные списки событий для последующей обработки. А именно, корректируются слова OBS_MODE=POINING/SURVEY в зависимости от типа наблюдения и производится центрирование на одни и те же координаты с помощью команды ```radec2xy```.
Для запуска адаптивного сглаживания ```do_adapt = True``` требуется запустить окружение ```ciao```, так как нужна команда ```dmimgadapt```
```
conda activate ciao-4.15
source <MY PATH>/eSASS4EDR/bin/esass-init.csh
```
### 03_init_obs.py
1) Подготавливает списки событий в разных энергетических диапазонах.
2) Запускает ```erbox``` в три этапа, чтобы получить рабочий список источников для ```ermldet```.
3) Запускает ```ermldet```
4) Делает кросс-корреляцию с каталогом Gaia-unWISE ```do_cross_match=True```, которая создает три файла: ```.cross``` -- все пересечения, и ```.ref``` / ```.src``` -- входные каталоги для последующей команды ```wcs_match```
5) Делает матрицу преобразования координат и корректирует списки событий. Для запуска команд ```wcs_match/wcs_update``` требуется запустить окружение ```ciao``` (см. выше)
### 04_mosaics.py
Создает сборные изображения (мозаики) в разных энергетических диапазонах.
### 05_scrtool.py
Запускает srctool для самого широкого канала 0.2-10 кэВ, чтобы спектры имели самое полное покрытие по энергиям. Список источников берется из 0.3-2.3 кэВ.
Вычисляет ECF для всех диапазонов.
Делает принудительную фотометрию в выбранных каналах (параметр ```forced=True```). Внимание! ermldet из eSASS4EDR не вычисляет асимметричные ошибки на потоки. Мы запускаем более позднюю версию ermldet (v1.56/2.18 esass_200412 Jul 2 12:04:46 2022). Для этого используется параметр ```local_run=True```, который показывает, какую команду надо запустить на другой машине, и ждет ввода.

683
scripts/makecds.py Normal file
View File

@@ -0,0 +1,683 @@
import cdspyreadme
from astropy.io import fits
from astropy.table import Table
# ---------------------------------------------------------------------------
# Column metadata for make_euds_cds().
#
# Both output tables repeat the same nine per-band column families
# (DET_LIKE / ML_RATE / ... / ML_BKG). The original hand-written version
# spelled out ~100 near-identical get_column() stanzas and contained two
# defects fixed here: the CDS name for ML_FLUX_4 was "MLFLux4" (now
# "MLFlux4", consistent with every other band) and the DEC description of
# table 1 read "Dec (J2000) (RA)" (now "(DEC)").
# ---------------------------------------------------------------------------

# Energy range for each FITS column suffix. Table 1 (eUDS) uses the
# unsuffixed columns for the main band plus suffixes 1-4; table 2 (forced
# 4XMM-DR12 photometry) uses suffixes 0-4.
_BAND_LABEL = {
    "": "0.3-2.3 keV",
    "0": "0.3-2.3 keV",
    "1": "0.3-0.6 keV",
    "2": "0.6-2.3 keV",
    "3": "2.3-5 keV",
    "4": "5-8 keV",
}

# Per-band column families for table 1 (eUDS catalog):
# (FITS stem, CDS stem, CDS format, description template).
_EUDS_BAND_COLUMNS = [
    ("DET_LIKE", "DetLike", "F12.6",
     "Detection likelihood measured by PSF-fitting; {band} ({fits})"),
    ("ML_RATE", "MLRate", "F8.6",
     "Source count rate measured by PSF-fitting; {band} ({fits})"),
    ("ML_RATE_ERR", "e_MLRate", "F8.6",
     "1 sigma count rate error; {band} ({fits})"),
    ("ML_CTS", "MLCts", "F11.6",
     "Source net counts measured from count rate; {band} ({fits})"),
    ("ML_CTS_ERR", "e_MLCts", "F11.6",
     "1 sigma counts error; {band} ({fits})"),
    ("ML_FLUX", "MLFlux", "E12.6",
     "Source flux converted from count rate; {band} ({fits})"),
    ("ML_FLUX_ERR", "e_MLFlux", "E12.6",
     "1 sigma flux error; {band} ({fits})"),
    ("ML_EXP", "MLExp", "E12.6",
     "Vignetted exposure value; {band} ({fits})"),
    ("ML_BKG", "MLBkg", "F12.6",
     "Background at the source position; {band} ({fits})"),
]

# Same families for table 2 (forced photometry). A few display formats
# differ from table 1, and DET_LIKE is described without the PSF-fitting
# wording, exactly as in the original hand-written stanzas.
_DR12_BAND_COLUMNS = [
    ("DET_LIKE", "DetLike", "F12.6",
     "Detection likelihood; {band} ({fits})"),
    ("ML_RATE", "MLRate", "F10.6",
     "Source count rate measured by PSF-fitting; {band} ({fits})"),
    ("ML_RATE_ERR", "e_MLRate", "F10.6",
     "1 sigma count rate error; {band} ({fits})"),
    ("ML_CTS", "MLCts", "F11.6",
     "Source net counts measured from count rate; {band} ({fits})"),
    ("ML_CTS_ERR", "e_MLCts", "F12.6",
     "1 sigma counts error; {band} ({fits})"),
    ("ML_FLUX", "MLFlux", "E12.6",
     "Source flux converted from count rate; {band} ({fits})"),
    ("ML_FLUX_ERR", "e_MLFlux", "E12.6",
     "1 sigma flux error; {band} ({fits})"),
    ("ML_EXP", "MLExp", "E10.6",
     "Vignetted exposure value; {band} ({fits})"),
    ("ML_BKG", "MLBkg", "F12.6",
     "Background at the source position; {band} ({fits})"),
]


def _fits_name(stem, suffix):
    """FITS column name for a family stem and band suffix ('' = main band)."""
    return "{}_{}".format(stem, suffix) if suffix else stem


def _set_column(cds_table, fits_name, cds_name, fmt, description):
    """Rename one CDS column and attach its display format and description."""
    col = cds_table.get_column(fits_name)
    col.name = cds_name
    col.set_format(fmt)
    col.description = description


def _set_band_columns(cds_table, families, suffixes):
    """Apply CDS name/format/description to every per-band column family."""
    for suffix in suffixes:
        band = _BAND_LABEL[suffix]
        for stem, cds_stem, fmt, template in families:
            fits = _fits_name(stem, suffix)
            _set_column(cds_table, fits, cds_stem + suffix, fmt,
                        template.format(band=band, fits=fits))


def make_euds_cds(infile1=None, outfile1=None, infile2=None, outfile2=None, fluxlim=None):
    """Convert the eUDS FITS catalogues to CDS (VizieR) format.

    Parameters
    ----------
    infile1 : str
        Path to the eUDS source catalogue (eUDS.fits).
    outfile1 : str
        CDS table name for the eUDS catalogue (e.g. 'table1.dat').
    infile2 : str
        Path to the forced 4XMM-DR12 photometry catalogue
        (eUDS_4XMM-DR12.fits).
    outfile2 : str
        CDS table name for the forced catalogue (e.g. 'table2.dat').
    fluxlim :
        Unused; kept for backward compatibility with existing callers.

    Side effects: writes the CDS data tables to the working directory and
    the generated ReadMe to 'ReadMe.tmp'.
    """
    tablemaker = cdspyreadme.CDSTablesMaker()

    # ----- Table 1: the eUDS catalogue -----
    print("Reading {}".format(infile1))
    r = Table.read(infile1)
    # Column order matters for the CDS output: scalar columns, the main-band
    # family, the 4XMM-DR12 counterpart name, then bands 1-4 family by family
    # (same order as the original hand-written list).
    names = ['ID_SRC', 'NAME', 'RA', 'DEC', 'RADEC_ERR', 'EXT', 'EXT_ERR', 'EXT_LIKE']
    names += [stem for stem, _cds, _fmt, _tpl in _EUDS_BAND_COLUMNS]
    names.append('DR12_IAU_NAME')
    for suffix in ("1", "2", "3", "4"):
        names += [_fits_name(stem, suffix) for stem, _cds, _fmt, _tpl in _EUDS_BAND_COLUMNS]
    tab = Table([r[n] for n in names])

    table = tablemaker.addTable(tab, name=outfile1,
                                description="eUDS X-ray catalog, converted from eUDS.fits")
    _set_column(table, 'ID_SRC', 'SrcID', 'I3', 'Source ID (ID_SRC)')
    _set_column(table, 'NAME', 'Name', 'A21', 'Source name, eUDS JHHMMSS.s+DDMMSS')
    _set_column(table, 'RA', 'RAdeg', 'F10.6', 'RA (J2000) (RA)')
    # Fixed: the original description erroneously read "Dec (J2000) (RA)".
    _set_column(table, 'DEC', 'DEdeg', 'F10.6', 'Dec (J2000) (DEC)')
    _set_column(table, 'RADEC_ERR', 'ePos', 'F6.3', 'Positional uncertainty (RADEC_ERR)')
    _set_column(table, 'EXT', 'Ext', 'F5.2', 'Source extent (EXT)')
    _set_column(table, 'EXT_ERR', 'e_Ext', 'F5.2', 'Extent error (EXT_ERR)')
    _set_column(table, 'EXT_LIKE', 'ExtLike', 'F10.6', 'Extent likelihood (EXT_LIKE)')
    _set_column(table, 'DR12_IAU_NAME', 'DR12Name', 'A21',
                '4XMM-DR12 source name used for forced photometry (DR12_IAU_NAME)')
    _set_band_columns(table, _EUDS_BAND_COLUMNS, ("", "1", "2", "3", "4"))

    # ----- Table 2: forced photometry of 4XMM-DR12 sources -----
    print("Reading {}".format(infile2))
    r = Table.read(infile2)
    # Order: identification columns, each family over bands 0-4, then CONF.
    names = ['DR12_IAU_NAME', 'DR12_SRCID', 'DR12_RA', 'DR12_DEC', 'DR12_RADEC_ERR']
    for stem, _cds, _fmt, _tpl in _DR12_BAND_COLUMNS:
        names += [_fits_name(stem, str(i)) for i in range(5)]
    names.append('CONF')
    tab2 = Table([r[n] for n in names])

    table2 = tablemaker.addTable(tab2, name=outfile2,
                                 description="eUDS forced photometry of 4XMM-DR12 sources, converted from eUDS_4XMM-DR12.fits")
    _set_column(table2, 'DR12_IAU_NAME', 'DR12Name', 'A21',
                '4XMM-DR12 source IAU name (DR12_IAU_NAME)')
    _set_column(table2, 'DR12_SRCID', 'DR12SrcID', 'I15',
                'Source ID (64-bit integer, DR12_SRCID)')
    _set_column(table2, 'DR12_RA', 'RAdeg', 'F10.6', 'Right ascension (J2000, DR12_RA)')
    _set_column(table2, 'DR12_DEC', 'DEdeg', 'F10.6', 'Declination (J2000, DR12_DEC)')
    _set_column(table2, 'DR12_RADEC_ERR', 'ePos', 'F6.3', 'Positional error (DR12_RADEC_ERR)')
    _set_band_columns(table2, _DR12_BAND_COLUMNS, ("0", "1", "2", "3", "4"))
    _set_column(table2, 'CONF', 'Conf', 'I1',
                'True if source is located within 60 arcsec from another source with 0.3-2.3 keV flux > 1E14 erg/s/cm**2 (CONF)')

    tablemaker.writeCDSTables()

    # Customize ReadMe output
    tablemaker.title = "eUDS: The SRG/eROSITA X-ray Survey of the UKIDSS Ultra Deep Survey Field. Catalogue of Sources."
    tablemaker.author = 'Krivonos R.'
    tablemaker.authors = "Gilfanov M., Medvedev P., Sazonov S., Sunyaev R."
    tablemaker.date = 2024
    tablemaker.bibcode = "2024MNRAS.528.1264K"
    tablemaker.keywords = "catalogues - surveys - X-ray: general"
    tablemaker.abstract = "The eROSITA X-ray telescope on board the Spectrum-Roentgen-Gamma (SRG) spacecraft observed the field of the UKIDSS Ultra-Deep Survey (UDS) in August-September 2019, during its flight to Sun-Earth L2 point. The resulting eROSITA UDS (or eUDS) survey was thus the first eROSITA X-ray imaging survey, which demonstrated the capability of the telescope to perform uniform observations of large sky areas. With a moderate single-camera exposure of 150 ks, eUDS covered 5 deg^2^ with the limiting flux ranging between 4*10^-15^ and 5*10^-14^erg/cm^2^/s in the 0.3-2.3 keV band. We present a catalogue of 647 sources detected at likelihood >10 (4sigma) during the eUDS. The catalogue provides information on the source fluxes in the main energy band 0.3-2.3 keV and forced photometry in a number of bands between 0.3 and 8 keV. Using the deeper 4XMM-DR12 catalogue, we have identified 22 strongly variable objects that have brightened or faded by at least a factor of ten during the eROSITA observations compared to previous observations by XMM-Newton. We also provide a catalogue of 22 sources detected by eROSITA in the hard energy band of 2.3-5 keV."
    tablemaker.more_description = "The catalogue (fits/eUDS.fits) contains 647 sources detected in the 0.3-2.3 keV band using eSASS adopting a detection likelihood threshold of 10. The catalogue provides information on the source fluxes in the main energy band 0: 0.3-2.3 keV and forced photometry in 1: 0.3-0.6, 2: 0.6-2.3, 3: 2.3-5.0 and 4: 5.0-8.0 keV bands. The catalogue of forced PSF-fitting fluxes (fits/eUDS_4XMM-DR12.fits) contains 3603 XMM-Newton sources from 4XMM-DR12 catalogue with the eUDS exposure of more than 100 s. The catalogue provides information on the source forced photometry in the energy bands 0: 0.3-2.3, 1: 0.3-0.6, 2: 0.6-2.3, 3: 2.3-5.0 and 4: 5.0-8.0 keV. To warn about the potential impact of spatial confusion with a nearby bright source, we added a confusion flag (see original paper for details)."
    tablemaker.putRef("J/A+A/661/A1", "The eFEDS X-ray catalogs (Brunner+, 2022)")
    tablemaker.putRef("J/A+A/661/A38", "SRG/ART-XC 1st year all-sky X-ray survey (Pavlinsky+, 2022)")
    tablemaker.putRef("https://www.srg.cosmos.ru/eUDS", "eUDS catalog")
    #tablemaker.makeReadMe()
    with open("ReadMe.tmp", "w") as fd:
        tablemaker.makeReadMe(out=fd)
# Entry point: convert both eUDS FITS catalogues to CDS format
# (writes table1.dat / table2.dat and ReadMe.tmp in the working directory).
make_euds_cds(infile1="../uds/products/eUDS.fits",outfile1='table1.dat',
              infile2="../uds/products/eUDS_4XMM-DR12.fits",outfile2='table2.dat')

View File

@@ -0,0 +1,55 @@
import glob, sys
from astropy.io import fits
import statistics
import pandas as pd
import numpy as np
import csv
# Per-observation RADEC_ERR statistics for sources that the wcs_match fit
# actually used (rows of the *.xfm.log.dat.awk logs not flagged 'N').
# NOTE(review): leading indentation was lost in the source dump; the
# nesting below is reconstructed -- verify against the original file.
colnames=['Match', 'Ref', 'Dup', 'RA', 'Dec', 'Incl']  # NOTE(review): unused here
flist=glob.glob('../products/tm[1,5,6,7]*en0*src.fits')
total1=0
total2=0
for f in flist:
    hdul = fits.open(f)
    # NOTE(review): bare except silently skips files without the expected
    # column; consider narrowing to KeyError.
    try:
        table=hdul[1].data['RADEC_ERR']
    except:
        continue
    hdul.close()
    # Reference catalogue produced alongside the source list.
    dref=f.replace("src.fits","ref.fits")
    hdul = fits.open(dref)
    try:
        rtable=hdul[1].data['RA']
    except:
        continue
    hdul.close()
    err=[]
    err10=[]
    # wcs_match log, space-separated; columns presumably follow 'colnames'
    # above -- confirm.
    dfile=f.replace("shu2019.src.fits","xfm.log.dat.awk")
    with open(dfile) as csvfile:
        spamreader = csv.reader(csvfile, delimiter=' ')
        total=0
        for row in spamreader:
            # Rows flagged 'N' were excluded from the fit -- TODO confirm.
            if(row[5] == 'N'):
                continue
            # Match the log's row index against the reference-table index
            # (O(rows*sources) scan; adequate for these catalogue sizes).
            for ii,values in np.ndenumerate(rtable):
                if(int(row[2]) == int(ii[0])):
                    total=total+1
                    #print(">>",row[5],rtable[ii],table[ii],ii[0])
                    if(table[ii]>0.0):
                        err.append(table[ii])
                    if(table[ii]>10.0):
                        err10.append(table[ii])
    total1=total1+total
    total2=total2+len(err)
    # NOTE(review): min()/max() raise ValueError if err stays empty for a file.
    #print("{} MIN {:.2f} MAX {:.2f} ({}/{})".format(f[12:15],min(err),max(err),len(err10),len(err)))
    print("{} MIN {:.2f} MAX {:.2f} ({}/{})".format(f,min(err),max(err),len(err10),len(err)))
    #sys.exit()
#print(total1,total2)

View File

@@ -0,0 +1,67 @@
# Script: decompose the 2x2 affine astrometric-correction matrix stored in
# each *.xfm file into scale, rotation and translation terms and print the
# matrix elements per observation.
import glob, sys
from astropy.io import fits
import statistics
import pandas as pd
import numpy as np
import csv
from uds.config import *

colnames=['Match', 'Ref', 'Dup', 'RA', 'Dec', 'Incl']
flist=glob.glob('../products/tm[1,5,6,7]*en0*.xfm')
total1=0
total2=0
for f in flist:
    # Observation key is the path fragment before "_SourceCatalog"
    # (f[12:pos] skips the '../products/' prefix).
    pos = f.find("_SourceCatalog")
    key=f[12:pos]
    hdul = fits.open(f)
    try:
        tab=hdul[1].data
    except:
        continue
    hdul.close()
    # Pixel scale (arcsec) comes from the matching source-catalog header.
    dsrc=f.replace(".xfm",".shu2019.src.fits")
    hdul = fits.open(dsrc)
    try:
        hdr=hdul[0].header
        cdelt=hdr['CDELT2']*3600
    except:
        continue
    hdul.close()
    # Affine matrix [[a11, a12], [a21, a22]], per-axis scales and the
    # translation (converted from pixels to arcsec via cdelt).
    a11=tab[0]['a11']
    a12=tab[0]['a12']
    a22=tab[0]['a22']
    a21=tab[0]['a21']
    x_scale=tab[0]['x_scale']
    y_scale=tab[0]['y_scale']
    t1=tab[0]['t1']*cdelt
    t2=tab[0]['t2']*cdelt
    # Row/column norms as signed scale estimates.
    sx1=np.sign(a11)*np.sqrt(a11**2 + a12**2)
    sy1=np.sign(a22)*np.sqrt(a21**2 + a22**2)
    sx2=np.sign(a11)*np.sqrt(a11**2 + a21**2)
    sy2=np.sign(a22)*np.sqrt(a12**2 + a22**2)
    # Rotation-angle estimates in arcmin.
    # NOTE(review): a1 mixes sx1 and sy2 in its arguments -- confirm the
    # intended normalisation; a1/a2/b1/b2, t1/t2, det and x/y_scale are
    # currently only used by the commented-out summary print below.
    a1 = np.arctan2(a12/sx1,a11/sy2) * 180 / np.pi * 60
    a2 = np.arctan2(a12/sx1,a22/sy2) * 180 / np.pi * 60
    b1 = np.arctan2(a21,a11) * 180 / np.pi * 60
    b2 = np.arctan2(a21,a22) * 180 / np.pi * 60
    det=a11*a22-a12*a21
    # Sanity value for arctan2 branch handling (unused).
    alpha=355*np.pi/180
    a3 = np.arctan2(np.sin(alpha),np.cos(alpha)) * 180 / np.pi
    print("{:+.8} {:+.8}".format(a11,a12))
    print("{:+.8} {:+.8}".format(a21,a22))
    #print("{} d {:.10f} s {:.8f} {:.8f} {:.8f} {:.8f} | {:+.8f} {:+.8f} {:+.8f} {:+.8f} t {:+.2f} {:+.2f}".format(obslist[key],np.sqrt(det)*cdelt,sx1*cdelt,sx2*cdelt,sy1*cdelt,sy2*cdelt,a1,a2,b1,b2,t1,t2))
    #print("{} d {:.10f} s {:.8f} {:.8f} {:.8f} {:.8f} | {:+.8f} {:+.8f} {:+.8f} {:+.8f} t {:+.2f} {:+.2f}".format(obslist[key],np.sqrt(det)*cdelt,sx1*cdelt,sx2*cdelt,sy1*cdelt,sy2*cdelt,a1,a2,b1,b2,t1,t2))

468
uds/uds/astrometry.py Normal file
View File

@@ -0,0 +1,468 @@
import glob
import os
import sys
import numpy as np
from astropy.io import fits
import pickle
from scipy.stats import sem
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord # High-level coordinates
from astropy.coordinates import ICRS, Galactic, FK4, FK5 # Low-level frames
from astropy.coordinates import Angle, Latitude, Longitude # Angles
import matplotlib.pyplot as plt
from astropy.table import QTable, Table, Column
from mpdaf.obj import WCS as mWCS
from sherpa.astro.ui import *
from uds.config import *
from scipy.optimize import minimize
from random import uniform
def sky2pix(wcs, dsrc):
    """Fill pixel coordinates 'x'/'y' of every source dict in place.

    wcs  : mpdaf WCS; its sky2pix expects [dec, ra] and returns [[y, x]].
    dsrc : list of dicts with 'ra'/'dec' in degrees; each gets 'x' and 'y'.
    """
    for src in dsrc:
        pixel = wcs.sky2pix([src['dec'], src['ra']], nearest=False)
        src['x'] = pixel[0][1]
        src['y'] = pixel[0][0]
def pix2sky(wcs, dsrc):
    """Fill corrected sky coordinates 'ra1'/'dec1' of every source in place.

    wcs  : mpdaf WCS; its pix2sky expects [y, x] and returns [[dec, ra]].
    dsrc : list of dicts with 'x'/'y'; each gets 'ra1' and 'dec1' (degrees).
    """
    for src in dsrc:
        sky = wcs.pix2sky([src['y'], src['x']])
        src['ra1'] = sky[0][1]
        src['dec1'] = sky[0][0]
def plot_resid(dsrc, dref, crval1, crval2):
    """Per-pair separations vs. offset from the field centre, sorted.

    Pairs dsrc[i] (original 'ra'/'dec') with dref[i] and returns three
    parallel lists ordered by increasing centre offset:
    (offset from (crval1, crval2) in arcmin,
     source-reference separation in arcsec,
     source positional error 'radec_err').
    """
    center_crd = SkyCoord(crval1, crval2, frame=FK5(), unit="deg")
    rows = []
    for i in range(len(dsrc)):
        s_crd = SkyCoord(dsrc[i]['ra'], dsrc[i]['dec'], frame=FK5(), unit="deg")
        r_crd = SkyCoord(dref[i]['ra'], dref[i]['dec'], frame=FK5(), unit="deg")
        rows.append((s_crd.separation(center_crd).arcmin,
                     s_crd.separation(r_crd).arcsec,
                     dsrc[i]['radec_err']))
    rows.sort(key=lambda rec: rec[0])  # stable: ties keep input order
    return ([rec[0] for rec in rows],
            [rec[1] for rec in rows],
            [rec[2] for rec in rows])
def plot_resid1(dsrc, dref, crval1, crval2):
    """Same as plot_resid but uses the corrected positions 'ra1'/'dec1'.

    Returns (offsets arcmin, separations arcsec, 'radec_err' values),
    all sorted by increasing offset from (crval1, crval2).
    """
    center_crd = SkyCoord(crval1, crval2, frame=FK5(), unit="deg")
    rows = []
    for i in range(len(dsrc)):
        s_crd = SkyCoord(dsrc[i]['ra1'], dsrc[i]['dec1'], frame=FK5(), unit="deg")
        r_crd = SkyCoord(dref[i]['ra'], dref[i]['dec'], frame=FK5(), unit="deg")
        rows.append((s_crd.separation(center_crd).arcmin,
                     s_crd.separation(r_crd).arcsec,
                     dsrc[i]['radec_err']))
    rows.sort(key=lambda rec: rec[0])  # stable: ties keep input order
    return ([rec[0] for rec in rows],
            [rec[1] for rec in rows],
            [rec[2] for rec in rows])
def get_rms(dsrc, dref):
    """RMS separation in arcsec between index-matched source/reference pairs.

    Uses the original 'ra'/'dec' of each source.  Raises ZeroDivisionError
    on empty input (unchanged from the original behaviour).

    Fix: dropped the dead per-iteration lookup of 'radec_err' (unused here,
    and it spuriously required that key in every dsrc record).
    """
    resid = 0.0
    for idx in range(len(dsrc)):
        src_crd = SkyCoord(dsrc[idx]['ra'], dsrc[idx]['dec'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        resid = resid + src_crd.separation(ref_crd).arcsec ** 2
    return np.sqrt(resid / len(dsrc))
def get_rms_w(dsrc, dref):
    """Error-weighted mean separation (arcsec), weights 1/radec_err**2.

    NOTE(review): despite the '_rms_' name this returns a weighted mean of
    the separations, not a root-mean-square.

    Fix: removed the unused locals 'resid' and 'n' from the original.
    """
    A = 0.0  # sum of sep / err^2
    B = 0.0  # sum of 1 / err^2
    for idx in range(len(dsrc)):
        radec_err = dsrc[idx]['radec_err']
        src_crd = SkyCoord(dsrc[idx]['ra'], dsrc[idx]['dec'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        sep = src_crd.separation(ref_crd).arcsec
        A = A + sep / radec_err**2
        B = B + 1.0 / radec_err**2
    return A / B
# A=Total(table_lc_sav.rate/(table_lc_sav.err)^2)
# B=Total(1.0/(table_lc_sav.err)^2)
def get_rms1_w(dsrc, dref):
    """Error-weighted mean separation (arcsec) for the corrected positions.

    Same as get_rms_w but uses 'ra1'/'dec1' of each source.  NOTE(review):
    returns a weighted mean, not an RMS, despite the name.

    Fix: removed the unused locals 'resid' and 'n' from the original.
    """
    A = 0.0  # sum of sep / err^2
    B = 0.0  # sum of 1 / err^2
    for idx in range(len(dsrc)):
        radec_err = dsrc[idx]['radec_err']
        src_crd = SkyCoord(dsrc[idx]['ra1'], dsrc[idx]['dec1'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        sep = src_crd.separation(ref_crd).arcsec
        A = A + sep / radec_err**2
        B = B + 1.0 / radec_err**2
    return A / B
def get_chi2_radec(dsrc, dref):
    """Chi-square of the original positions: sum of (sep/radec_err)**2.

    Separations are in arcsec; 'radec_err' is taken from each dsrc record.

    Fix: removed the unused counter 'n' from the original.
    """
    resid = 0.0
    for idx in range(len(dsrc)):
        radec_err = dsrc[idx]['radec_err']
        src_crd = SkyCoord(dsrc[idx]['ra'], dsrc[idx]['dec'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        sep = src_crd.separation(ref_crd).arcsec
        resid = resid + sep**2 / radec_err**2
    return resid
def get_rms1(dsrc, dref):
    """RMS separation (arcsec) using the corrected positions 'ra1'/'dec1'.

    Raises ZeroDivisionError on empty input (unchanged behaviour).

    Fix: dropped the dead per-iteration lookup of 'radec_err' (unused).
    """
    resid = 0.0
    for idx in range(len(dsrc)):
        src_crd = SkyCoord(dsrc[idx]['ra1'], dsrc[idx]['dec1'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        resid = resid + src_crd.separation(ref_crd).arcsec ** 2
    return np.sqrt(resid / len(dsrc))
def get_chi2_radec1(dsrc, dref):
    """Chi-square of the corrected positions: sum of (sep/radec_err)**2.

    Pairs each source's 'ra1'/'dec1' with the reference 'ra'/'dec'.

    Fix: removed the unused counter 'n' from the original.
    """
    resid = 0.0
    for idx in range(len(dsrc)):
        radec_err = dsrc[idx]['radec_err']
        src_crd = SkyCoord(dsrc[idx]['ra1'], dsrc[idx]['dec1'], frame=FK5(), unit="deg")
        ref_crd = SkyCoord(dref[idx]['ra'], dref[idx]['dec'], frame=FK5(), unit="deg")
        sep = src_crd.separation(ref_crd).arcsec
        resid = resid + sep**2 / radec_err**2
    return resid
def do_transform_resid(params, wcs, dsrc, dref, crval1, crval2):
    # Apply a trial WCS shift to the sources and return the resulting chi^2.
    #
    # params : (angle_in, crval1_in, crval2_in) -- trial rotation (arcmin)
    #          and reference-point values.  The rotation is currently
    #          disabled (the wcs1.rotate call is commented out), so angle_in
    #          is accepted but unused.
    # wcs    : mpdaf WCS of the source catalog (not modified -- the shift is
    #          applied to a copy).
    # crval1/crval2 : original reference values; only used by the
    #          commented-out debug print.
    angle_in, crval1_in, crval2_in = params
    # fills x,y from the unshifted WCS
    sky2pix(wcs,dsrc)
    wcs1=wcs.copy()
    # apply the trial transform to the copy
    #wcs1.rotate(angle_in/60)
    wcs1.set_crval1(crval1_in)
    wcs1.set_crval2(crval2_in)
    # fills ra1,dec1 from the shifted WCS
    pix2sky(wcs1,dsrc)
    # calculate new statistics
    resid1 = get_chi2_radec1(dsrc,dref)
    #print("--> {:+.4f} {:+.4f} {:.4f}".format((crval1-wcs.get_crval1())*3600,(crval2-wcs.get_crval2())*3600,resid1))
    return resid1
def do_astro_corr(src=None, ref=None, catalog=None):
    """ calculates astronomy correction """
    # Monte-Carlo search for the RA/Dec shift of the source catalog that
    # minimises chi^2 against the reference catalog.  Products (named from
    # `catalog`):
    #   *.shift.log       one-line summary of the best shift
    #   *.orig.qdp        QDP plot of separations before correction
    #   *.shift.qdp       QDP plot of separations after correction
    #   *.shift-map.fits  table of every sampled shift and its chi^2
    log=catalog.replace(".fits", ".shift.log")
    log0=catalog.replace(".fits", ".orig.qdp")
    log1=catalog.replace(".fits", ".shift.qdp")
    fout=catalog.replace(".fits", ".shift-map.fits")
    title=os.path.split(catalog)[1]
    key=title.replace("_SourceCatalog_en0.fits","")
    #png=catalog.replace(".fits", ".opti.png")
    #png=png.replace(key,"{}_{}".format(obslist[key],key))
    t = Table(names=('rota', 'shift_ra','shift_dec','chi2',), dtype=('f8','f8','f8','f8'))
    print(src)
    # Source catalog: image HDU (for the WCS) + table HDU (positions/errors).
    hdul = fits.open(src)
    src_map_data = hdul[0].data
    src_map_hdr = hdul[0].header
    src_tab_data = hdul[1].data
    src_tab_hdr = hdul[1].header
    hdul.close()
    dsrc=[]
    for rec in src_tab_data:
        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'], radec_err = rec['RADEC_ERR'])
        dsrc.append(d)
    print(ref)
    # Reference catalog, same layout (no RADEC_ERR needed here).
    hdul = fits.open(ref)
    ref_map_data = hdul[0].data
    ref_map_hdr = hdul[0].header
    ref_tab_data = hdul[1].data
    ref_tab_hdr = hdul[1].header
    hdul.close()
    dref=[]
    for rec in ref_tab_data:
        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'])
        dref.append(d)
    # Statistics before any correction.
    rms = get_rms(dsrc,dref)
    chi2_radec = get_chi2_radec(dsrc,dref)
    wcs_src=mWCS(src_map_hdr)
    wcs_src.info()
    crval1=wcs_src.get_crval1()
    crval2=wcs_src.get_crval2()
    #print(crval1,crval2)
    # QDP plot of the uncorrected separations vs. centre offset.
    offset0, sep0, err0 = plot_resid(dsrc,dref,crval1,crval2)
    with open(log0, 'w') as writer:
        writer.write("label Y Separation, arcsec\nlabel X Offset, arcmin\nr y -1 15\n")
        writer.write("ma 9 on\n")
        writer.write("read serr 2\n")
        for idx, s in enumerate(offset0):
            writer.write("{:.2f} {:.2f} {:.2f}\n".format(offset0[idx],sep0[idx],err0[idx]))
    wcs_ref=mWCS(ref_map_hdr)
    wcs_ref.info()
    # Random search over CRVAL shifts within Rsim arcsec of the original
    # reference point; rotation is fixed at 0 (rota_min/rota_max are kept
    # for the disabled shift+rotation mode, see the results log below).
    chi2_radec1_opt=1000
    rota_opt=0.0
    crval1_opt=crval1
    crval2_opt=crval2
    rota_min=-30
    rota_max=30
    Rsim=10.0
    Nsim=20000
    crd = SkyCoord(crval1, crval2, frame=FK5(), unit="deg")
    i=0
    while i < Nsim:
        #rota_rand = uniform(rota_min, rota_max)
        rota_rand=0.0
        crval1_rand = uniform(crval1-Rsim/3600, crval1+Rsim/3600)
        crval2_rand = uniform(crval2-Rsim/3600, crval2+Rsim/3600)
        #crval1_rand = crval1
        #crval2_rand = crval2
        sim_crd = SkyCoord(crval1_rand, crval2_rand, frame=FK5(), unit="deg")
        # Reject draws outside the circular search radius (the square was
        # sampled); these do not consume an iteration.
        if(sim_crd.separation(crd).arcsec > Rsim):
            continue
        chi2_radec1 = do_transform_resid([rota_rand, crval1_rand, crval2_rand], wcs_src, dsrc, dref, crval1, crval2)
        t.add_row((rota_rand,(crval1-crval1_rand)*3600,(crval2-crval2_rand)*3600,chi2_radec1))
        if(chi2_radec1 < chi2_radec1_opt):
            chi2_radec1_opt = chi2_radec1
            rota_opt = rota_rand
            crval1_opt = crval1_rand
            crval2_opt = crval2_rand
        i += 1
    # Re-apply the optimal parameters so dsrc holds the best ra1/dec1.
    r = do_transform_resid([rota_opt, crval1_opt, crval2_opt], wcs_src, dsrc, dref, crval1, crval2)
    print(chi2_radec1_opt,'==',r)
    shift_ra=(crval1-crval1_opt)*3600
    shift_dec=(crval2-crval2_opt)*3600
    t.write(fout, format='fits', overwrite=True)
    rms1 = get_rms1(dsrc,dref)
    # QDP plot of the corrected separations.
    offset1, sep1, err1 = plot_resid1(dsrc,dref,crval1,crval2)
    with open(log1, 'w') as writer:
        writer.write("label Y Separation, arcsec\nlabel X Offset, arcmin\nr y -1 15\n")
        writer.write("ma 9 on\n")
        writer.write("read serr 2\n")
        for idx, s in enumerate(offset1):
            writer.write("{:.2f} {:.2f} {:.2f}\n".format(offset1[idx],sep1[idx],err1[idx]))
    # One-line summary: obsid, best rotation/shift, chi^2 before/after and
    # the (weighted) RMS before/after.
    with open(log, 'w') as writer:
        writer.write("{} {:+.2f} {:+.2f} {:+.2f} ({:.2f} - {:.2f}) = {:.2f} {:.2f} ({:.2f}) {:.2f} ({:.2f}) N={} -- {}\n".format(obslist[key],
                     rota_opt,
                     shift_ra,
                     shift_dec,
                     get_chi2_radec(dsrc,dref),
                     get_chi2_radec1(dsrc,dref),
                     chi2_radec-chi2_radec1_opt,
                     rms,get_rms_w(dsrc,dref),
                     rms1,get_rms1_w(dsrc,dref),
                     len(dsrc),key))
"""
Nsim=20000 Shift only
N01 +0.00 +4.76 +0.07 (171.22 - 28.06) = 143.15 6.35 (5.11) 3.92 (1.40) N=22 -- tm1_obs_1
N02 +0.00 +2.77 -2.74 (354.15 - 225.27) = 128.88 10.10 (5.34) 9.38 (2.61) N=31 -- tm5_obs_1
N03 +0.00 +3.88 -3.65 (542.25 - 303.25) = 239.00 8.74 (6.87) 6.73 (3.07) N=36 -- tm5_obs_2
N04 +0.00 +0.28 +0.03 (43.16 - 43.04) = 0.12 6.04 (3.39) 6.07 (3.36) N=17 -- tm6_park_1
N05 +0.00 +0.65 -0.78 (175.10 - 169.55) = 5.56 6.77 (4.47) 6.54 (4.51) N=38 -- tm6_scan_1
N06 +0.00 -0.38 +0.27 (26.60 - 26.37) = 0.23 6.54 (4.27) 6.52 (4.23) N=11 -- tm6_park_2
N07 +0.00 +0.04 -0.35 (128.82 - 127.98) = 0.84 5.76 (4.21) 5.75 (4.18) N=35 -- tm6_scan_2
N08 +0.00 +0.58 -0.22 (10.07 - 9.79) = 0.29 3.94 (2.83) 3.89 (2.82) N= 9 -- tm6_park_3
N09 +0.00 +1.35 -0.56 (95.06 - 87.04) = 8.02 5.73 (4.45) 5.60 (4.28) N=38 -- tm6_scan_3
N10 +0.00 +0.56 +0.36 (15.71 - 15.41) = 0.30 4.22 (3.49) 4.22 (3.43) N=10 -- tm6_park_4
N11 +0.00 +1.08 -0.62 (116.34 - 108.60) = 7.73 5.38 (4.41) 5.11 (4.26) N=37 -- tm6_scan_4
N12 +0.00 -0.56 -1.56 (84.93 - 59.86) = 25.07 4.20 (2.45) 4.09 (1.88) N=30 -- tm6_obs_1
N13 +0.00 +0.19 -1.03 (17.58 - 13.28) = 4.30 3.38 (1.90) 3.23 (1.31) N=13 -- tm6_obs_2_badpix
N14 +0.00 +1.03 -1.99 (63.23 - 35.27) = 27.97 4.36 (2.73) 3.27 (2.05) N=23 -- tm6_obs_3_badpix
N15 +0.00 +0.88 -1.66 (187.51 - 164.26) = 23.25 6.93 (3.29) 6.62 (2.68) N=33 -- tm7_obs_1
N16 +0.00 +2.74 -0.52 (113.70 - 75.09) = 38.61 5.34 (4.23) 4.73 (3.42) N=26 -- tm7_obs_2
Nsim=20000 Shift+Rotation
N01 -1.50 +4.93 +0.00 (171.22 - 27.67) = 143.55 6.35 (5.11) 3.83 (1.38) N=22 -- tm1_obs_1
N02 -2.15 +2.47 -3.05 (354.15 - 223.27) = 130.88 10.10 (5.34) 9.29 (2.71) N=31 -- tm5_obs_1
N03 -5.08 +3.87 -4.04 (542.25 - 296.00) = 246.25 8.74 (6.87) 6.93 (2.82) N=36 -- tm5_obs_2
N04 +5.90 -0.19 +0.50 (43.16 - 39.56) = 3.60 6.04 (3.39) 5.57 (3.57) N=17 -- tm6_park_1
N05 -5.84 +1.37 -0.85 (175.10 - 111.02) = 64.08 6.77 (4.47) 5.53 (3.58) N=38 -- tm6_scan_1
N06 -0.30 -0.27 +0.31 (26.60 - 26.36) = 0.24 6.54 (4.27) 6.50 (4.27) N=11 -- tm6_park_2
N07 -5.49 +0.73 -0.98 (128.82 - 86.16) = 42.66 5.76 (4.21) 4.94 (3.62) N=35 -- tm6_scan_2
N08 -0.28 +0.51 -0.29 (10.07 - 9.78) = 0.29 3.94 (2.83) 3.90 (2.82) N= 9 -- tm6_park_3
N09 -4.56 +0.91 -0.40 (95.06 - 64.56) = 30.49 5.73 (4.45) 4.93 (3.56) N=38 -- tm6_scan_3
N10 +2.01 +0.54 +0.63 (15.71 - 15.17) = 0.54 4.22 (3.49) 4.03 (3.39) N=10 -- tm6_park_4
N11 -3.83 +1.01 -1.29 (116.34 - 82.25) = 34.08 5.38 (4.41) 4.75 (3.72) N=37 -- tm6_scan_4
N12 -1.35 -0.23 -1.27 (84.93 - 60.30) = 24.63 4.20 (2.45) 4.20 (1.82) N=30 -- tm6_obs_1
N13 -1.52 +0.17 -1.38 (17.58 - 13.30) = 4.29 3.38 (1.90) 3.21 (1.28) N=13 -- tm6_obs_2_badpix
N14 -1.24 +0.63 -2.18 (63.23 - 34.42) = 28.81 4.36 (2.73) 3.28 (1.92) N=23 -- tm6_obs_3_badpix
N15 -5.23 +0.82 -1.47 (187.51 - 161.20) = 26.31 6.93 (3.29) 6.31 (2.66) N=33 -- tm7_obs_1
N16 -5.41 +2.47 -0.97 (113.70 - 60.40) = 53.30 5.34 (4.23) 4.59 (2.89) N=26 -- tm7_obs_2
"""
def do_astro_corr_rota(src=None, ref=None, catalog=None):
    """ calculates astronomy correction """
    # Scans rotation angles of the source WCS and records the angle that
    # minimises the RMS separation against the reference catalog.  Writes a
    # one-line *.rota.log summary and a *.rota.png plot of RMS/RMS0 vs angle.
    title=os.path.split(catalog)[1]
    png=catalog.replace(".fits", ".rota.png")
    log=catalog.replace(".fits", ".rota.log")
    key=title.replace("_SourceCatalog_en0.fits","")
    png=png.replace(key,"{}_{}".format(obslist[key],key))
    print(src)
    hdul = fits.open(src)
    src_map_data = hdul[0].data
    src_map_hdr = hdul[0].header
    src_tab_data = hdul[1].data
    src_tab_hdr = hdul[1].header
    hdul.close()
    dsrc=[]
    for rec in src_tab_data:
        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'], radec_err = rec['RADEC_ERR'])
        dsrc.append(d)
    print(ref)
    hdul = fits.open(ref)
    ref_map_data = hdul[0].data
    ref_map_hdr = hdul[0].header
    ref_tab_data = hdul[1].data
    ref_tab_hdr = hdul[1].header
    hdul.close()
    dref=[]
    for rec in ref_tab_data:
        d = dict(ra = rec['RA'], dec = rec['DEC'], ra_err = rec['RA_ERR'], dec_err = rec['DEC_ERR'])
        dref.append(d)
    # Baseline RMS with no rotation applied.
    resid = get_rms(dsrc,dref)
    print(resid)
    wcs_src=mWCS(src_map_hdr)
    wcs_src.info()
    wcs_ref=mWCS(ref_map_hdr)
    wcs_ref.info()
    x=[]
    y=[]
    rms_min=1000.0
    rota_min=1000.0
    # Angles are in degrees here; plotted/reported in arcmin (angle*60).
    # NOTE(review): do_transform calls wcs.rotate on the WCS it is given --
    # if it rotates wcs_src in place, the rotations accumulate across this
    # loop instead of each angle being tested from the original astrometry;
    # verify do_transform works on a copy.
    for angle in np.arange(-0.5, 0.5, 0.01):
        resid1=do_transform(angle,wcs_src,dsrc,dref)
        x.append(angle*60)
        y.append(resid1/resid)
        if(resid1<rms_min):
            rms_min=resid1
            rota_min=angle*60
    with open(log, 'w') as writer:
        writer.write("{} {} {:.2f} {:.2f} {:.2f} {:.2f}%\n".format(obslist[key],title,rota_min, rms_min, resid, (1.0-rms_min/resid)*100.0))
    # Plot relative RMS vs rotation angle.
    fig, ax = plt.subplots( nrows=1, ncols=1 )  # create figure & 1 axis
    ax.plot(x,y)
    plt.ylabel('RMS/RMS0, RMS0={:.2f} arcsec'.format(resid))
    plt.xlabel('Rotation, arcmin')
    plt.title("{} {} {} pairs, {:.1f}%".format(obslist[key], title,len(dsrc),(1.0-rms_min/resid)*100.0))
    plt.grid(True)
    plt.axvline(x = 0.0, color = 'b', label = 'zero')
    fig.savefig(png)
    plt.close(fig)  # close the figure window
def do_transform(angle, wcs, dsrc, dref):
    """Rotate a copy of ``wcs`` by ``angle`` and return the resulting RMS.

    Fills 'x'/'y' of each source from the unrotated WCS, rotates, converts
    back to 'ra1'/'dec1' and returns get_rms1(dsrc, dref) in arcsec.

    Bug fix: the original called ``wcs.rotate(angle)`` on the caller's WCS,
    mutating it in place, so repeated calls (as in do_astro_corr_rota's
    angle scan) accumulated rotations instead of testing each angle from
    the original astrometry.  Rotating a copy matches the pattern already
    used by do_transform_resid.
    """
    # fills x,y from the original, unrotated WCS
    sky2pix(wcs, dsrc)
    wcs1 = wcs.copy()  # keep the caller's WCS pristine
    wcs1.rotate(angle)
    # fills ra1,dec1 from the rotated WCS
    pix2sky(wcs1, dsrc)
    resid1 = get_rms1(dsrc, dref)
    return resid1

View File

@@ -43,12 +43,67 @@ wcslist={'tm1_obs_1':[34.7279760,-5.0680267],
'tm6_scan_3':[34.5405596,-4.8088748], 'tm6_scan_3':[34.5405596,-4.8088748],
'tm6_scan_4':[34.5405596,-4.8088748]} 'tm6_scan_4':[34.5405596,-4.8088748]}
""" Диапазоны энергий. """ """ like in the paper (Table 1) """
emin_ev=[300, 300, 600, 2300, 200] obslist={'tm1_obs_1':'N01',
emax_ev=[2300, 600, 2300, 5000,10000] 'tm5_obs_1':'N02',
emin_kev=[0.3, 0.3, 0.6, 2.3, 0.2] 'tm5_obs_2':'N03',
emax_kev=[2.3, 0.6, 2.3, 5.0, 10.0] 'tm7_obs_1':'N15',
ecf=[1.0, 1.0, 1.0, 1.0, 1.0] 'tm7_obs_2':'N16',
'tm6_obs_1':'N12',
'tm6_obs_2_badpix':'N13',
'tm6_obs_3_badpix':'N14',
'tm6_park_1':'N04',
'tm6_park_2':'N06',
'tm6_park_3':'N08',
'tm6_park_4':'N10',
'tm6_scan_1':'N05',
'tm6_scan_2':'N07',
'tm6_scan_3':'N09',
'tm6_scan_4':'N11'}
""" Это просто индекс диапазона для выходных файлов. """ """ Это просто индекс диапазона для выходных файлов. """
eband=["0", "1", "2", "3", "4"] eband=[ "0", "1", "2", "3", "4", "5", "6", "7", "8"]
""" Диапазоны энергий. """
emin_ev=[ 300, 300, 600, 2300, 200, 300, 5000, 500, 1000]
emax_ev=[2300, 600, 2300, 5000, 10000,8000, 8000, 1000, 2000]
emin_kev=[0.3, 0.3, 0.6, 2.3, 0.2, 0.3, 5.0, 0.5, 1.0]
emax_kev=[2.3, 0.6, 2.3, 5.0, 10.0, 8.0, 8.0, 1.0, 2.0]
#ecf = [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
# something is wrong here
#ecf = [9.7817E+11, 3.2982E+12, 1.3903E+12, 2.3322E+12, 5.2022E+11, 5.8453E+11, 3.8468E+12]
"""
*** en0 ecf 9.7817E+11 +/- 2.4606E+10 2.52% N=17
*** en1 ecf 3.2982E+12 +/- 8.2963E+10 2.52% N=17
*** en2 ecf 1.3903E+12 +/- 3.5036E+10 2.52% N=17
*** en3 ecf 2.3322E+12 +/- 5.8717E+10 2.52% N=17
*** en4 ecf 5.2022E+11 +/- 1.3110E+10 2.52% N=17
*** en5 ecf 5.8453E+11 +/- 1.4743E+10 2.52% N=17
"""
# finally use Pavel's file ../data/ECF/ecf_tbabspow_g2nh0.02.pkl
"""
for e in [(0.3, 2.3), (0.3, 0.6), (0.6, 2.3), (2.3, 5.0), (5.0, 8.0)]:
print(f'{ecf[e]:g}')
"""
ecf = [1.0911e+12, # (0.3, 2.3)
1.07252e+12, # (0.3, 0.6)
1.08963e+12, # (0.6, 2.3)
1.14674e+11, # (2.3, 5.0)
1.0,
1.0,
2.77581e+10, # (5.0, 8.0)
1354632916123.6191, # (0.5, 1.0) 4XMM-DR12 EP2 band
1014764099304.4968] # (1.0, 2.0) 4XMM-DR12 EP3 band
outfile_post='.fits'
"""
Pavel Medvedev:
0.3-2.3: 9.135819435325375e-13
0.3-0.6: 9.160477830652834e-13
0.6-2.3: 9.201664167869427e-13
2.3-5.0: 8.721504826794627e-12
"""

359
uds/uds/sherpa.py Normal file
View File

@@ -0,0 +1,359 @@
import glob
import os
import sys
import numpy as np
from astropy.io import fits
import pickle
from scipy.stats import sem
#import matplotlib.pyplot as plt
#plt.style.use('ggplot')
from sherpa.astro.ui import *
def calc_ecf(path,catprep=None, emin=None, emax=None, eband=None, outfile=None, fitmin=0.2, fitmax=5.0, simnum=100):
    """
    Fit each grouped source spectrum with an absorbed power law and sample
    per-band fluxes, pickling everything needed to derive energy conversion
    factors (ECF = ML_RATE / flux).

    path    -- glob pattern matching the ARF files to process
    catprep -- ermldet catalog FITS file supplying ML_RATE_0,
               ML_RATE_ERR_0 and DET_LIKE_0 per detUID (required)
    emin/emax/eband -- per-band energy limits (keV) and band labels
    outfile -- destination pickle of the per-source result dict
    fitmin/fitmax   -- energy range (keV) used for the spectral fit
    simnum  -- number of sample_flux realisations per band

    Original note: SPECRESP was to be un-corrected for
    CORRCOMB=CORRPSF*CORRVIGN (SPECRESP=SPECRESP/CORRCOMB) because ermldet
    does not correct for PSF and vignetting -- that step is currently
    commented out below.
    """
    data={}
    if (catprep == None):
        print("catprep file is not provided, exit")
        sys.exit()
    else:
        print("Reading {}".format(catprep))
    # Index the catalog by detUID; duplicates are treated as fatal.
    catprep_hdul = fits.open(catprep,mode='readonly')
    catprep_data = catprep_hdul[1].data
    #print(catprep_data['detUID'])
    mlrate = {}
    mlrate_err = {}
    dl = {}
    for rec in catprep_data:
        srcid=rec['detUID']
        """ consider only non-extended sources """
        #if(rec['EXT'] > 0.0):
        #    continue
        if(srcid in mlrate.keys()):
            print("Duplicate SrcID! {}".format(srcid))
            sys.exit()
        else:
            mlrate[srcid]=rec['ML_RATE_0']
            mlrate_err[srcid]=rec['ML_RATE_ERR_0']
            dl[srcid]=rec['DET_LIKE_0']
    flist=glob.glob(path)
    for filename in flist:
        print("Reading {}".format(filename))
        #fout=filename.replace(".fits", ".uncorr.fits")
        # Grouped spectrum lives next to the ARF with a derived name.
        pha=filename.replace("ARF", "SourceSpec")
        pha=pha.replace(".fits", ".fits.grp")
        pha_header = fits.getheader(pha,ext=1)
        # NOTE(review): 'object' shadows the builtin; the OBJECT keyword
        # holds the numeric source id used to build "ML<id>".
        object = pha_header['OBJECT']
        """ check a given source (testing...) """
        #if (int(object) > 10):
        #    continue
        srcid="ML{}".format(object)
        if not (srcid in mlrate.keys()):
            print("Skip {}".format(srcid))
            continue
        #hdul = fits.open(filename,mode='readonly')
        #specresp = np.divide(hdul[1].data['SPECRESP'], hdul[1].data['CORRCOMB'])
        #hdul[1].data['SPECRESP'] = specresp
        #hdul.writeto(fout, overwrite=True)
        # Sherpa session: absorbed power law, frozen nH and gamma, so only
        # the normalisation is fitted.
        clean()  # sherpa
        set_xsxsect("vern")
        set_xsabund('wilm')
        load_pha(pha)  # sherpa
        """ load uncorrected ARF """
        # load_arf(fout)  # sherpa
        # Define the source model
        #sh.set_source("powlaw1d.p1")
        set_source("xstbabs.abs1 * powlaw1d.p1")  # sherpa
        abs1.nH = 0.02
        # Change reference energy of the model
        p1.ref = 1  # 1 keV
        p1.gamma = 2.0
        p1.ampl = 1e-4  # in cm**-2 s**-1 keV**-1
        freeze(abs1.nH, p1.gamma)  # sherpa
        ### Define the statistic
        set_stat("wstat")  # sherpa
        ignore_bad()  # sherpa
        ### Define the fit range
        print("Notice 0.3-5.0 keV:")
        notice(fitmin, fitmax)  # sherpa
        """ Check wether spectrum still has counts after filtering """
        dep = get_dep(filter=True)
        if not (len(dep)):
            print("*** Skip {} due to low counts".format(srcid))
            continue
        ### Do the fit
        fit()
        res_fit=get_fit_results()
        rstat=res_fit.rstat
        set_analysis('energy')
        # Point-estimate fluxes per band; any non-positive photon flux
        # disqualifies the source.
        photon_flx=[]
        energy_flx=[]
        skipme=False
        for ie in range(len(eband)):
            ph_flx=calc_photon_flux(emin[ie], emax[ie])
            en_flx=calc_energy_flux(emin[ie], emax[ie])
            if not (ph_flx>0):
                skipme=True
            photon_flx.append(ph_flx)
            energy_flx.append(en_flx)
            print("en{} Photon flux: {:.4E} Energy flux: {:.4E}".format(eband[ie], ph_flx, en_flx))
        if (skipme == True):
            print("*** Skip zero flux for {}".format(srcid))
            continue
        print("----> Success: {} pow norm: {:.4E} reduced stat: {:.2f}".format(res_fit.succeeded,res_fit.parvals[0], rstat))
        # Monte-Carlo flux sampling (68% confidence) only for acceptable
        # fits; sources with rstat >= 3 are dropped entirely.
        sample_flx=[]
        sample_flx_lo=[]
        sample_flx_hi=[]
        if (res_fit.rstat < 3.0):
            conf()
            res_conf=get_conf_results()
            #print(res_conf)
            component=abs1+p1
            #print(component)
            #covar()
            #errs = get_covar_results().parmaxes
            #ans = sample_flux(correlated=False, scales=errs, num=500)
            for ie in range(len(eband)):
                pars_flux = sample_flux(modelcomponent=component, lo=emin[ie], hi=emax[ie], num=simnum, correlated=True, numcores=10, confidence=68)
                # sample_flux returns (median, upper, lower) in [0][0..2].
                sample_flx.append(pars_flux[0][0])
                sample_flx_lo.append(pars_flux[0][2])
                sample_flx_hi.append(pars_flux[0][1])
        else:
            continue
        print("### SAMPLE FLUX: {}".format(sample_flx))
        data[srcid]={'sample_flx':sample_flx,
                     'sample_flx_lo':sample_flx_lo,
                     'sample_flx_hi':sample_flx_hi,
                     'photon_flx':photon_flx,
                     'energy_flx':energy_flx,
                     'ml_rate':mlrate[srcid],
                     'ml_rate_err':mlrate_err[srcid],
                     'det_like':dl[srcid]}
    with open(outfile, 'wb') as f:
        pickle.dump(data, f)
def calc_flux(path,catprep=None, emin=None, emax=None, eband=None, outfile=None, fitmin=0.2, fitmax=5.0, simnum=100):
    """
    Variant of calc_ecf that refits each spectrum separately in every energy
    band (notice(emin[ie], emax[ie]) before each fit) instead of one global
    fit, then samples per-band fluxes and pickles the results.

    Parameters are the same as calc_ecf; fitmin/fitmax are accepted but not
    used here (the per-band limits define each fit range).
    """
    data={}
    if (catprep == None):
        print("catprep file is not provided, exit")
        sys.exit()
    else:
        print("Reading {}".format(catprep))
    # Index the catalog by detUID; duplicates are fatal.
    catprep_hdul = fits.open(catprep,mode='readonly')
    catprep_data = catprep_hdul[1].data
    #print(catprep_data['detUID'])
    mlrate = {}
    mlrate_err = {}
    dl = {}
    for rec in catprep_data:
        srcid=rec['detUID']
        """ consider only non-extended sources """
        #if(rec['EXT'] > 0.0):
        #    continue
        if(srcid in mlrate.keys()):
            print("Duplicate SrcID! {}".format(srcid))
            sys.exit()
        else:
            mlrate[srcid]=rec['ML_RATE_0']
            mlrate_err[srcid]=rec['ML_RATE_ERR_0']
            dl[srcid]=rec['DET_LIKE_0']
    flist=glob.glob(path)
    for filename in flist:
        print("Reading {}".format(filename))
        pha=filename.replace("ARF", "SourceSpec")
        pha=pha.replace(".fits", ".fits.grp")
        pha_header = fits.getheader(pha,ext=1)
        # NOTE(review): 'object' shadows the builtin; OBJECT holds the
        # numeric source id for "ML<id>".
        object = pha_header['OBJECT']
        srcid="ML{}".format(object)
        if not (srcid in mlrate.keys()):
            print("Skip {}".format(srcid))
            continue
        clean()  # sherpa
        set_xsxsect("vern")
        set_xsabund('wilm')
        load_pha(pha)  # sherpa
        # Define the source model
        #sh.set_source("powlaw1d.p1")
        set_source("xstbabs.abs1 * powlaw1d.p1")  # sherpa
        abs1.nH = 0.02
        # Change reference energy of the model
        p1.ref = 1  # 1 keV
        p1.gamma = 2.0
        p1.ampl = 1e-4  # in cm**-2 s**-1 keV**-1
        freeze(abs1.nH, p1.gamma)  # sherpa
        ### Define the statistic
        set_stat("wstat")  # sherpa
        photon_flx=[]
        energy_flx=[]
        skipme=False
        for ie in range(len(eband)):
            ignore_bad()  # sherpa
            ### Define the fit range for this band only
            print("Notice {}-{} keV:".format(emin[ie], emax[ie]))
            notice(emin[ie], emax[ie])  # sherpa
            """ Check wether spectrum still has counts after filtering """
            dep = get_dep(filter=True)
            if not (len(dep)):
                # NOTE(review): skipping a band leaves photon_flx/energy_flx
                # shorter than eband, and res_fit may be undefined below if
                # every band is skipped -- verify intended behaviour.
                print("*** Skip {} due to low counts".format(srcid))
                continue
            ### Do the fit
            fit()
            res_fit=get_fit_results()
            rstat=res_fit.rstat
            set_analysis('energy')
            ph_flx=calc_photon_flux(emin[ie], emax[ie])
            en_flx=calc_energy_flux(emin[ie], emax[ie])
            if not (ph_flx>0):
                skipme=True
            photon_flx.append(ph_flx)
            energy_flx.append(en_flx)
            print("en{} Photon flux: {:.4E} Energy flux: {:.4E}".format(eband[ie], ph_flx, en_flx))
            print("----> Success: {} pow norm: {:.4E} reduced stat: {:.2f}".format(res_fit.succeeded,res_fit.parvals[0], rstat))
        if (skipme == True):
            print("*** Skip zero flux for {}".format(srcid))
            continue
        # Sampling gate uses the fit results of the LAST band fitted above.
        sample_flx=[]
        sample_flx_lo=[]
        sample_flx_hi=[]
        if (res_fit.rstat < 3.0):
            conf()
            res_conf=get_conf_results()
            #print(res_conf)
            component=abs1+p1
            #print(component)
            #covar()
            #errs = get_covar_results().parmaxes
            #ans = sample_flux(correlated=False, scales=errs, num=500)
            for ie in range(len(eband)):
                pars_flux = sample_flux(modelcomponent=component, lo=emin[ie], hi=emax[ie], num=simnum, correlated=True, numcores=10, confidence=68)
                # sample_flux returns (median, upper, lower) in [0][0..2].
                sample_flx.append(pars_flux[0][0])
                sample_flx_lo.append(pars_flux[0][2])
                sample_flx_hi.append(pars_flux[0][1])
        else:
            continue
        print("### SAMPLE FLUX: {}".format(sample_flx))
        data[srcid]={'sample_flx':sample_flx,
                     'sample_flx_lo':sample_flx_lo,
                     'sample_flx_hi':sample_flx_hi,
                     'photon_flx':photon_flx,
                     'energy_flx':energy_flx,
                     'ml_rate':mlrate[srcid],
                     'ml_rate_err':mlrate_err[srcid],
                     'det_like':dl[srcid]}
    with open(outfile, 'wb') as f:
        pickle.dump(data, f)
def print_ecf(infile=None, emin=None, emax=None, eband=None, skipfrac=5.0):
    # Read the pickle produced by calc_ecf/calc_flux and print, per band,
    # each source's ECF (ML_RATE / sampled flux) plus the mean ECF over the
    # sources whose relative flux error is below `skipfrac` percent.
    #
    # infile   -- pickle file with the per-source dicts
    # emin/emax-- accepted for symmetry with the other helpers (unused here)
    # eband    -- band labels; its length drives the per-band loop
    # skipfrac -- max relative flux error (%) for a source to enter the mean
    with open(infile, 'rb') as f:
        data = pickle.load(f)
    for ie in range(len(eband)):
        print()
        ecf=[]
        for key in data.keys():
            ar=data[key]
            ml = ar['ml_rate']
            dml = ar['ml_rate_err']
            flx = ar['sample_flx'][ie]
            # Asymmetric sampling errors; keep the larger one
            # (conservative).
            dflx1 = ar['sample_flx'][ie] - ar['sample_flx_lo'][ie]
            dflx2 = ar['sample_flx_hi'][ie] - ar['sample_flx'][ie]
            # take maximum error as conservative approach
            dflx = np.maximum(dflx1,dflx2)
            # ECF and its propagated error / relative error.
            ecf0 = ml/flx
            decf0 = abs(flx*dml - ml*dflx) / np.power(flx,2)
            frac0 = 100 * abs(flx*dml - ml*dflx) / (flx*ml)
            skipme=False
            if((100*dflx/flx) < skipfrac):
                skipme=False
            else:
                skipme=True
            #if not (len(ar['sample_flx'])>0):
            #    continue
            print("en{} {} DL={:.2f} FLUX: {:4E} (-{:.4E} +{:.4E} {:.2f}%) ML_RATE: {:.4f} +/- {:.4f} ({:.2f}%) --> {:.4E} +/- {:.4E} {:.2f}% skip={}".format(eband[ie],key,ar['det_like'],
                  flx,dflx1,dflx2,100*dflx/flx,
                  ml,dml,100*dml/ml,
                  ecf0,decf0,frac0,skipme))
            if(skipme==True):
                continue
            ecf.append(ecf0)
        # Mean ECF and its standard error over the accepted sources.
        ecf_mean = np.mean(ecf)
        ecf_err = np.std(ecf, ddof=1)/np.sqrt(np.size(ecf))
        print("\n*** en{} ecf {:.4E} +/- {:.4E} {:.2f}% N={}".format(eband[ie],
              ecf_mean,
              ecf_err,100*ecf_err/ecf_mean,
              np.size(ecf)))
    # NOTE(review): unconditional sys.exit() -- presumably a debugging
    # leftover / hard stop; its exact nesting was lost in the source
    # formatting, placed after the band loop here based on the logged
    # en0..en5 output seen elsewhere in the repo -- confirm.
    sys.exit()

File diff suppressed because it is too large Load Diff