import re
import json
import glob
import os
import sys
import logging
import requests
import pprint
import time

import pandas as pd
import numpy as np
from math import pi, cos, sin
from Quaternion import Quat
from scipy.spatial.transform import Rotation, Slerp
from scipy.sparse import coo_matrix
from datetime import datetime

from django.db.models import Q
from django.urls import resolve
from django.contrib.auth.models import Group
from django_pandas.io import read_frame
from django.utils.timezone import localdate, localtime

import astropy
from astropy.io import fits
from astropy.io import ascii
from astropy.coordinates import SkyCoord  # High-level coordinates
from astropy.coordinates import ICRS, Galactic, FK4, FK5  # Low-level frames
from astropy.coordinates import Angle, Latitude, Longitude  # Angles
import astropy.units as u
from astropy import units as au
from astropy_healpix import HEALPix, neighbours
from astropy.time.formats import erfa
from astropy.time import Time, TimeDelta, TimezoneInfo, TimeFromEpoch

from erotrans.models import EroTransSource, EroTrans
from erosurvey.models import NSIDE_PLATES, ORDER_PLATES

from heasarc.models import NSIDE_SOURCES, ORDER, HeasarcBase, GreenSNR

from astrobasis.models import GAIADR2
from astrobasis.models import Flesch
from astrobasis.models import SDSSDR12Spec
from astrobasis.models import ZTFAlert
from astrobasis.models import Simbad
from astrobasis.models import AllWise
from astrobasis.models import NVSS
from astrobasis.models import FIRST
from astrobasis.models import SUMSS
from astrobasis.models import VLASSfromVizieR
from astrobasis.models import BJfromVizieR
from astrobasis.models import GAIADR3fromVizieR

from srgcat.models import ArtCat, SkyMaps, SrgDataDump
from srgcat.models import SkyMapSource, SelectAllskyMissed
from srgcat.models import eRositaMatch
from srgcat.models import TrackStats
from srgcat.models import Gyro

from monthplan.models import Survey
from monthplan.models import SurveyHealpixPlate

from srglib import correlate_utils, check_transient

# astroquery needs a writable home directory for its cache
os.environ["HOME"] = "/tmp/djangoastropy/"
if not os.path.exists("/tmp/djangoastropy/.astropy"):
    os.makedirs("/tmp/djangoastropy/.astropy")
from astroquery.simbad import Simbad as aSimbad
from astroquery.vizier import Vizier as aVizier

MJDREF = 51543.875
TZ_UTC = TimezoneInfo(utc_offset=0*u.hour)
TZ_MSK = TimezoneInfo(utc_offset=3*u.hour)

TIMESTR_FORMAT = '%Y.%m.%d %H:%M:%S'

def boolean_notnan(x):
    """Return True if x is not NaN and equal to 1, otherwise False."""
    if np.isnan(x):
        return False
    return int(x) == 1


def notnan(val):
    """Return val, or None if val is NaN."""
    return val if not np.isnan(val) else None

def mission2date(timesec):
    """Convert seconds of mission time (counted from MJDREF) to Moscow datetime."""
    mjdref = Time(MJDREF, format='mjd')
    delta = TimeDelta(timesec, format='sec')
    dtime = mjdref + delta + 3*u.hour
    return dtime.to_datetime(timezone=TZ_MSK)


def str2date(timestr, tzone):
    """Parse a 'YYYY.MM.DD HH:MM:SS' string into a timezone-aware datetime."""
    time_raw = datetime.strptime(timestr, TIMESTR_FORMAT)
    return time_raw.replace(tzinfo=tzone)


def str2dateT(timestr, tzone):
    """Parse a 'YYYY-MM-DDTHH:MM:SS' string into a timezone-aware datetime."""
    time_raw = datetime.strptime(timestr, '%Y-%m-%dT%H:%M:%S')
    return time_raw.replace(tzinfo=tzone)


def date2mission(dtime):
    """Convert a datetime back to seconds of mission time (counted from MJDREF)."""
    mjdref = Time(MJDREF, format='mjd')
    dtime = Time(dtime, format='datetime')
    return (dtime - mjdref).sec

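# Usage sketch for the time helpers above (values follow the MJDREF defined
# at module level; exact datetimes shown here are illustrative):
#   mission2date(0.0)                       -> datetime of the mission epoch, TZ_MSK
#   str2date("2020.01.01 12:00:00", TZ_UTC) -> timezone-aware datetime
#   date2mission(datetime(2020, 1, 1))      -> mission seconds as a float
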
def load_vizier_allwise(srcs, ssize=4096, **kwargs):
    """For each source, query VizieR AllWISE (II/328) around its position,
    cache new entries in the local AllWise table, then cross-match."""
    print("allwise")
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier()
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='II/328/allwise')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} AllWISE sources".format(src, df.index.size))
            for col in df.columns:
                print("col: {}".format(col))

            for index, row in df.iterrows():
                name = row['AllWISE']
                ra = row['RAJ2000']
                dec = row['DEJ2000']
                print("{} --> {}".format(src, name))

                try:
                    simsrc = AllWise.objects.get(name=name)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                    print("Found in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))
                except Exception as e:
                    print("Failed to find AllWise object for {}, {}".format(name, e))
                    print("Create AllWise object {}".format(name))
                    sc = SkyCoord(ra, dec, frame=FK5(), unit="deg")
                    simsrc = AllWise(name=name)
                    simsrc.ra = ra
                    simsrc.dec = dec
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = 0.0
                    simsrc.W1mag = row['W1mag']
                    simsrc.e_W1mag = row['e_W1mag']
                    simsrc.W2mag = row['W2mag']
                    simsrc.e_W2mag = row['e_W2mag']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded AllWise objects")
    find_counterparts(srcs, AllWise.objects.all(), "allwise",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

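# Usage sketch (the queryset and its 'allwise' filter field are hypothetical,
# adjust to the model actually carrying the many-to-many relation):
#   load_vizier_allwise(SkyMapSource.objects.filter(allwise=None),
#                       maxdist=120., minrad=40.)
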
def load_vizier_nvss(srcs, ssize=4096, **kwargs):
    """For each source, query VizieR NVSS (VIII/65) around its position,
    cache new entries in the local NVSS table, then cross-match."""
    print("nvss")
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier()
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='VIII/65/nvss')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} NVSS sources".format(src, df.index.size))

            for index, row in df.iterrows():
                name = row['NVSS']
                print("{} --> {}".format(src, name))

                try:
                    simsrc = NVSS.objects.get(name=name)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                    print("Found in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))
                except Exception as e:
                    print("Failed to find NVSS object for {}, {}".format(name, e))
                    print("Create NVSS object {}".format(name))
                    # NVSS positions come as sexagesimal strings
                    sc = SkyCoord(row['RAJ2000'] + " " + row['DEJ2000'],
                                  unit=("hourangle", "deg"), frame="fk5")
                    simsrc = NVSS(name=name, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = (row['e_RAJ2000'] + row['e_DEJ2000'])/2.0
                    simsrc.e_ra = row['e_RAJ2000']
                    simsrc.e_dec = row['e_DEJ2000']
                    simsrc.S14 = row['S1.4']
                    simsrc.e_S14 = row['e_S1.4']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded NVSS objects")
    find_counterparts(srcs, NVSS.objects.all(), "nvss",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_vizier_first(srcs, ssize=4096, **kwargs):
    """For each source, query VizieR FIRST (VIII/92) around its position,
    cache new entries in the local FIRST table, then cross-match."""
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier()
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='VIII/92/first14')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} FIRST sources".format(src, df.index.size))

            for index, row in df.iterrows():
                name = row['FIRST']
                print("{} --> {}".format(src, name))

                try:
                    simsrc = FIRST.objects.get(name=name)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                    print("Found in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))
                except Exception as e:
                    print("Failed to find FIRST object for {}, {}".format(name, e))
                    print("Create FIRST object {}".format(name))
                    sc = SkyCoord(row['RAJ2000'] + " " + row['DEJ2000'],
                                  unit=("hourangle", "deg"), frame="fk5")
                    simsrc = FIRST(name=name, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = (row['Maj'] + row['Min'])/2.0
                    simsrc.Fpeak = row['Fpeak']
                    simsrc.Fint = row['Fint']
                    simsrc.rms = row['Rms']
                    simsrc.major_axis = row['Maj']
                    simsrc.minor_axis = row['Min']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded FIRST objects")
    find_counterparts(srcs, FIRST.objects.all(), "first",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_vizier_sumss(srcs, ssize=4096, **kwargs):
    """For each source, query VizieR SUMSS (VIII/81B) around its position,
    cache new entries in the local SUMSS table, then cross-match."""
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier()
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='VIII/81B')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} SUMSS sources".format(src, df.index.size))
            for col in df.columns:
                print("col: {}".format(col))
            print(simreply[0])

            for index, row in df.iterrows():
                # no name column in the reply; build one from the coordinates
                name = "{}, {}".format(row['RAJ2000'], row['DEJ2000'])
                print("{} --> {}".format(src, name))

                try:
                    simsrc = SUMSS.objects.get(name=name)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                    print("Found in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))
                except Exception as e:
                    print("Failed to find SUMSS object for {}, {}".format(name, e))
                    print("Create SUMSS object {}".format(name))
                    sc = SkyCoord(row['RAJ2000'] + " " + row['DEJ2000'],
                                  unit=("hourangle", "deg"), frame="fk5")
                    simsrc = SUMSS(name=name, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = (row['MajAxis'] + row['MinAxis'])/2.0
                    simsrc.Sp = row['Sp']
                    simsrc.e_Sp = row['e_Sp']
                    simsrc.St = row['St']
                    simsrc.e_St = row['e_St']
                    simsrc.e_ra = row['e_RAJ2000']
                    simsrc.e_dec = row['e_DEJ2000']
                    simsrc.ra_orig = row['RAJ2000']
                    simsrc.dec_orig = row['DEJ2000']
                    simsrc.major_axis = row['MajAxis']
                    simsrc.minor_axis = row['MinAxis']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded SUMSS objects")
    find_counterparts(srcs, SUMSS.objects.all(), "sumss",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_vizier_vlass(srcs, ssize=4096, **kwargs):
    """For each source, query the VLASS component catalog (J/ApJS/255/30/comp)
    around its position, cache new entries, then cross-match."""
    cols = ['recno', 'CompName', 'CompId', '_Glon', '_Glat',
            'RAJ2000', 'e_RAJ2000', 'DEJ2000', 'e_DEJ2000',
            'Ftot', 'e_Ftot', 'Fpeak', 'e_Fpeak', 'Islrms', 'SCode',
            'DCMaj', 'DCMin', 'DCPA', 'Subtile', 'NVSSdist', 'FIRSTdist',
            'PeakToRing', 'DupFlag', 'QualFlag', 'NNdist', 'MainSample', 'QLcutout']

    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier(columns=cols)
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='J/ApJS/255/30/comp')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} source(s)".format(src, df.index.size))

            for index, row in df.iterrows():
                name = row['CompName']
                ra = row['RAJ2000']
                dec = row['DEJ2000']
                print("{} --> {}".format(src, name))

                try:
                    simsrc = VLASSfromVizieR.objects.get(name=name)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                except Exception as e:
                    print("Failed to find VLASS object for {}, {}".format(name, e))
                    print("Create VLASS object {}".format(name))
                    sc = SkyCoord(ra, dec, frame=FK5(), unit="deg")
                    simsrc = VLASSfromVizieR(name=name, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = (row['e_RAJ2000'] + row['e_DEJ2000'])/2.0
                    simsrc.recno = int(row['recno'])
                    simsrc.glon = float(row['_Glon'])
                    simsrc.glat = float(row['_Glat'])
                    simsrc.e_ra = float(row['e_RAJ2000'])
                    simsrc.e_dec = float(row['e_DEJ2000'])
                    simsrc.ftot = float(row['Ftot'])
                    simsrc.e_ftot = float(row['e_Ftot'])
                    simsrc.fpeak = float(row['Fpeak'])
                    simsrc.e_fpeak = float(row['e_Fpeak'])
                    simsrc.dupflag = int(row['DupFlag'])
                    simsrc.qualflag = int(row['QualFlag'])
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded VLASS objects")
    find_counterparts(srcs, VLASSfromVizieR.objects.all(), "vlass",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_vizier_bj(srcs, ssize=4096, **kwargs):
    """For each source, query the Bailer-Jones Gaia eDR3 distance catalog
    (I/352/gedr3dis) around its position, cache new entries, then cross-match."""
    cols = ['Source', 'RA_ICRS', 'DE_ICRS',
            'rgeo', 'b_rgeo', 'B_rgeo',
            'rpgeo', 'b_rpgeo', 'B_rpgeo', 'Flag']

    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier(columns=cols)
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='I/352/gedr3dis')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} source(s)".format(src, df.index.size))

            for index, row in df.iterrows():
                source_id = int(row['Source'])
                ra = float(row['RA_ICRS'])
                dec = float(row['DE_ICRS'])
                print("{} --> {:d}".format(src, source_id))

                try:
                    simsrc = BJfromVizieR.objects.get(source_id=source_id)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                except Exception as e:
                    print("Failed to find BJ object for {}, {}".format(source_id, e))
                    print("Create BJ object {}".format(source_id))
                    sc = SkyCoord(ra, dec, frame=FK5(), unit="deg")
                    simsrc = BJfromVizieR(source_id=source_id, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.r_med_geo = row['rgeo']
                    simsrc.r_lo_geo = row['b_rgeo']
                    simsrc.r_hi_geo = row['B_rgeo']
                    simsrc.r_med_pgeo = row['rpgeo']
                    simsrc.r_lo_pgeo = row['b_rpgeo']
                    simsrc.r_hi_pgeo = row['B_rpgeo']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded BJ objects")
    find_counterparts(srcs, BJfromVizieR.objects.all(), "bj2021",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_vizier_gaia_dr3(srcs, ssize=4096, **kwargs):
    """For each source, query VizieR Gaia DR3 (I/355/gaiadr3) around its
    position, cache new entries, then cross-match."""
    cols = ['Source', 'RA_ICRS', 'DE_ICRS', 'e_RA_ICRS', 'e_DE_ICRS',
            'Plx', 'e_Plx', 'RPlx', 'PM', 'pmRA', 'e_pmRA', 'pmDE', 'e_pmDE',
            'Gmag', 'e_Gmag', 'BPmag', 'e_BPmag', 'RPmag', 'e_RPmag']
    # Other available columns include: RUWE, FG, e_FG, FBP, e_FBP, FRP, e_FRP,
    # BP-RP, RV, e_RV, Vbroad, GRVSmag, QSO, Gal, NSS, XPcont, XPsamp, RVS,
    # EpochPh, EpochRV, MCMCGSP, MCMCMSC, And, Teff, logg, __Fe_H_, Dist, A0,
    # HIP, PS1, SDSS13, SKYM2, TYC2, URAT1, AllWISE, APASS9, GSC23, RAVE5,
    # _2MASS, RAVE6, RAJ2000, DEJ2000.

    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    av = aVizier(columns=cols)
    av.ROW_LIMIT = -1
    av.TIMEOUT = 150
    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = av.query_region(sc0, radius=kwargs.get("maxdist", 120)*au.arcsec,
                                       catalog='I/355/gaiadr3')
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply[0].to_pandas()
            print("{} found {} source(s)".format(src, df.index.size))

            for index, row in df.iterrows():
                source_id = int(row['Source'])
                ra = float(row['RA_ICRS'])
                dec = float(row['DE_ICRS'])
                print("{} --> {:d}".format(src, source_id))

                try:
                    simsrc = GAIADR3fromVizieR.objects.get(source_id=source_id)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                except Exception as e:
                    print("Failed to find GAIADR3 object for {}, {}".format(source_id, e))
                    print("Create GAIADR3 object {}".format(source_id))
                    sc = SkyCoord(ra, dec, frame=FK5(), unit="deg")
                    simsrc = GAIADR3fromVizieR(source_id=source_id, ra=sc.fk5.ra.value, dec=sc.fk5.dec.value)
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.e_ra = row['e_RA_ICRS']
                    simsrc.e_dec = row['e_DE_ICRS']
                    simsrc.plx = row['Plx']
                    simsrc.e_plx = row['e_Plx']
                    simsrc.rplx = row['RPlx']
                    simsrc.pm = row['PM']
                    simsrc.pmra = row['pmRA']
                    simsrc.e_pmra = row['e_pmRA']
                    simsrc.pmde = row['pmDE']
                    simsrc.e_pmde = row['e_pmDE']
                    simsrc.gmag = row['Gmag']
                    simsrc.e_gmag = row['e_Gmag']
                    simsrc.bpmag = row['BPmag']
                    simsrc.e_bpmag = row['e_BPmag']
                    simsrc.rpmag = row['RPmag']
                    simsrc.e_rpmag = row['e_RPmag']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

    print("Cross-match with loaded GAIADR3 objects")
    find_counterparts(srcs, GAIADR3fromVizieR.objects.all(), "gaiadr3",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 40.))

def load_simbad_sources(erosrcs, ssize=4096, **kwargs):
    """For the given sources, query SIMBAD around each position (in chunks of
    ssize), cache the replies in the local Simbad table, and then search for
    cross-matches with the provided sources within the specified zone."""
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    esrc = read_frame(erosrcs)
    aSimbad.TIMEOUT = 150
    s1 = aSimbad()
    s1.reset_votable_fields()
    s1.add_votable_fields("z_value", "otype")
    fails = []
    print("sources to correlate", esrc.index.size)
    for i in range((esrc.index.size + ssize - 1)//ssize):
        eloc = esrc.iloc[i*ssize:min((i + 1)*ssize, esrc.index.size)]
        print("load simbad for first %d objects" % eloc.index.size)
        try:
            simbadc = s1.query_region(SkyCoord(eloc.ra.values, eloc.dec.values, unit="deg", frame="fk5"),
                                      radius=kwargs.get("maxdist", 60)*au.arcsec).to_pandas()
        except Exception as exp:
            print(exp)
            print("warning: failed to load")
            fails.append(eloc.id.values)
            continue
        print("found %d simbad sources" % simbadc.index.size)
        sc = SkyCoord(simbadc.RA.values + " " + simbadc.DEC.values, unit=("hourangle", "deg"), frame="fk5")
        simbadc["ra"] = sc.fk5.ra.value
        simbadc["dec"] = sc.fk5.dec.value
        simbadc["healpix"] = hp.skycoord_to_healpix(sc)
        simbadc.rename(columns={"COO_ERR_MINA": "error_radius", "Z_VALUE": "z"}, inplace=True)
        simbadc.error_radius.fillna(0., inplace=True)
        simbadc['main_id'] = simbadc.MAIN_ID.str.decode("ascii")
        simbadc['obj_class'] = simbadc.OTYPE.str.decode("ascii")
        simbadc['coo_bibcode'] = simbadc.COO_BIBCODE.str.decode("ascii")
        simbadc['coo_wavelength'] = simbadc.COO_WAVELENGTH.str.decode("ascii")
        simbadc.drop_duplicates('main_id', inplace=True)
        simbadc.drop(columns=["RA", "DEC", "RA_PREC", "DEC_PREC", "COO_ERR_MAJA",
                              "COO_ERR_ANGLE", "COO_QUAL", "MAIN_ID",
                              "OTYPE", "COO_WAVELENGTH", "COO_BIBCODE"], inplace=True)
        print("loaded", simbadc.index.size)
        sobj = [Simbad(**o) for i, o in simbadc.iterrows()]
        Simbad.objects.bulk_create(sobj, ignore_conflicts=True)
        ech = erosrcs.filter(id__in=eloc.id.values)
        print("correlating with %d sources" % ech.count())
        find_counterparts(ech, Simbad.objects.all(), "simbad",
                          maxdist=kwargs.get("maxdist", 20.), minrad=kwargs.get("minrad", 3.))
    efail = erosrcs.filter(id__in=np.concatenate(fails)) if fails else erosrcs.filter(id__in=[])
    return efail

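# Usage sketch: batch-resolve SIMBAD counterparts for a catalog's sources and
# retry any chunks that failed to download:
#   failed = load_simbad_sources(survey.erotranssource_set.all())
#   if failed.exists():
#       load_simbad_sources(failed)
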
def load_simbad_for_skymap_sources(srcs, **kwargs):
    """For the given sources, query SIMBAD around each position, cache the
    replies, and then search for cross-matches within the specified zone."""
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())

    aSimbad.TIMEOUT = 150
    s1 = aSimbad()
    s1.reset_votable_fields()
    s1.add_votable_fields("z_value", "otype")

    minrad = kwargs.get("minrad", 30.)
    for src in srcs:
        simbad_found_minrad = False  # a counterpart found within minrad

        src.simbad_failed = False
        src.simbad_notfound = False
        src.save()
        sc0 = SkyCoord(src.ra, src.dec, frame=FK5(), unit="deg")
        try:
            simreply = s1.query_region(SkyCoord(src.ra, src.dec, unit="deg", frame="fk5"),
                                       radius=kwargs.get("maxdist", 120)*au.arcsec)
        except Exception as e:
            print("{} failed to load {}".format(src, e))
            src.simbad_failed = True
            src.save()
            continue  # otherwise simreply would be undefined or stale

        if simreply:
            df = simreply.to_pandas()
            print("{} found {} simbad sources".format(src, df.index.size))

            for index, row in df.iterrows():
                main_id = row['MAIN_ID']
                print("{} --> {}".format(src, main_id))
                try:
                    simsrc = Simbad.objects.get(main_id=main_id)
                    sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                    print("Found in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))
                except Exception as e:
                    print("Failed to find Simbad object for {}, {}".format(main_id, e))
                    print("Create Simbad object {}".format(main_id))
                    sc = SkyCoord(row['RA'] + " " + row['DEC'], unit=("hourangle", "deg"), frame="fk5")
                    simsrc = Simbad(main_id=main_id)
                    simsrc.ra = sc.fk5.ra.value
                    simsrc.dec = sc.fk5.dec.value
                    simsrc.healpix = hp.skycoord_to_healpix(sc)
                    simsrc.error_radius = 0.0
                    simsrc.obj_class = row['OTYPE']
                    simsrc.coo_bibcode = row['COO_BIBCODE']
                    simsrc.coo_wavelength = row['COO_WAVELENGTH']
                    simsrc.z = row['Z_VALUE']
                    simsrc.save()
                    print("Created in DB: {} offset {:.2f} arcsec".format(simsrc, sc0.separation(sc).arcsec))

                sc = SkyCoord(simsrc.ra, simsrc.dec, frame=FK5(), unit="deg")
                if sc0.separation(sc).arcsec < minrad:
                    print("Take {} to cross-match".format(src))
                    simbad_found_minrad = True

        else:
            print("{} No astronomical object found".format(src))
            src.simbad_notfound = True
            src.save()

        if not simbad_found_minrad:
            print("Mark {} as simbad_notfound".format(src))
            src.simbad_notfound = True
            src.save()

    print("Cross-match with loaded Simbad objects")
    find_counterparts(srcs, Simbad.objects.all(), "simbad",
                      maxdist=kwargs.get("maxdist", 120.), minrad=kwargs.get("minrad", 30.))

def make_source_name(key, ra, dec):
    """Make a source name in the format 'KEY JHHMMSS.s+/-DDMMSS' from RA and Dec."""
    try:
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        name = '{0} J{1}{2}'.format(key,
                                    crd.ra.to_string(unit=u.hourangle, sep='', precision=1, pad=True),
                                    crd.dec.to_string(sep='', precision=0, alwayssign=True, pad=True))
        return name
    except BaseException as e:
        print('Failed: ' + str(e))
        return 'None'

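# Example: make_source_name("SRGA", 262.5, -23.75) -> 'SRGA J173000.0-234500'
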
def srg_record_stats(request, group):
    """Save user access statistics."""
    try:
        grp = Group.objects.get(name=group)
    except Group.DoesNotExist:
        grp = None

    current_url = resolve(request.path_info).url_name
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')

    if x_forwarded_for:
        ipaddress = x_forwarded_for.split(',')[-1].strip()
    else:
        ipaddress = request.META.get('REMOTE_ADDR')

    rec = TrackStats()
    rec.ip_address = ipaddress
    rec.url_name = current_url
    rec.path_info = request.path_info
    rec.user = request.user
    rec.group = grp
    rec.save()

status_code = {0: "No error",
               1: '<h2>User is not authenticated, please <a href="/login">login</a></h2>',
               2: '<h2>User has no Triton User Profile</h2>',
               3: '<h2>User has no appropriate group to access this page</h2>'}

def srg_group_auth(user, group):
    """Check whether the user belongs to the group required for access.

    Parameters
    ----------
    user : User
        User object from the request
    group : str
        Requested group name

    Returns
    -------
    status : int
        Status code {0: 'No error', 1: 'User is not authenticated',
        2: 'User has no user profile',
        3: 'User has no appropriate group to access this page'}

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    if not user.is_authenticated:
        return 1
    try:
        user_profile = user.profile
    except Exception:
        return 2

    if not user.groups.filter(name=group).exists():
        return 3

    return 0


def srg_group_auth_stats(request, group):
    """Check the user's group and record an access statistics entry."""
    status = srg_group_auth(request.user, group)
    if status == 0:
        srg_record_stats(request, group)
    return status

def srg_auth(user):
    """Check user authentication and profile.

    Parameters
    ----------
    user : User
        User object from the request

    Returns
    -------
    status : int
        Status code {0: 'No error', 1: 'User is not authenticated',
        2: 'User has no user profile'}

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    if not user.is_authenticated:
        return 1
    try:
        user_profile = user.profile
    except Exception:
        return 2
    return 0

def check_sources_transient(srclistquery):
    """Check whether the objects in the provided queryset have fluxes 10 times
    different from their HEASARC counterparts.

    Parameters
    ----------
    srclistquery : QuerySet
        a queryset of eRosita sources having the ML_FLUX_0 field,
        corresponding to the 0.5--2 keV band

    :Author:
        Andrey Semena <san@iki.rssi.ru>
    """
    srctoupdate = []
    for src in srclistquery.prefetch_related("heasarc"):
        check_transient.check_rass_transient(src)
        if src.heasarc.filter(polymorphic_ctype__in=[check_transient.polctypes[cat] for cat in ["sxps2", "3xmm", "csc2"]]).exists():
            src.transient = True
            check_transient.check_csc_transients(src)
            check_transient.check_sxps_transients(src)
            check_transient.check_xmm3_transients(src)
        if src.transient or src.rosat_transient:
            srctoupdate.append(src)
    EroTransSource.objects.bulk_update(srctoupdate, ["transient", "rosat_transient"])


def check_catsources_transient(cat):
    """Run the transient check for all sources of the catalog and store the count."""
    check_sources_transient(cat.erotranssource_set.all())
    cat.ntransients = cat.erotranssource_set.filter(Q(transient=True) | Q(rosat_transient=True)).count()
    cat.save()

def find_counterparts(srcquery, counterpatscat, counterpartname, neighsearch=1, verbose=False, **kwargs):
    """For all sources in srcquery, which is assumed to have a many-to-many
    relationship to the counterpart catalog, search for counterparts within
    max(minrad[=30], 3*sqrt(src.poserr**2 + counterpart.poserr**2)).

    Parameters
    ----------
    srcquery : QuerySet
        iterable providing access to the catalog under consideration
    counterpatscat : QuerySet
        catalog to search counterparts in
    counterpartname : str
        name of the many-to-many relationship field in srcquery
    **kwargs :
        additional arguments passed to makeposerrdist (see doc within)
    """
    start_time = time.time()

    scat = read_frame(srcquery)
    try:
        scat["error_radius"] = scat.radec_error
    except AttributeError:
        scat["error_radius"] = scat.error_radius

    uhealpix = np.unique(scat.healpix.values)
    for v in range(neighsearch):
        uhealpix = np.unique(np.concatenate([uhealpix, neighbours(uhealpix, NSIDE_SOURCES, order=ORDER).ravel()]))

    if verbose:
        print("--- find healpix in {} seconds ---".format(time.time() - start_time))
        ts = time.time()

    ccat = read_frame(counterpatscat.filter(healpix__in=uhealpix))
    print(scat.index.size, ccat.index.size)
    if 'error_radius' not in ccat.columns.values:
        ccat['error_radius'] = np.zeros(ccat.index.size)

    if verbose:
        print("--- select healpix in {} seconds ---".format(time.time() - ts))
        ts = time.time()

    if scat.index.size == 0 or ccat.index.size == 0:
        return 0
    ccat.error_radius.fillna(0., inplace=True)
    dmat = correlate_utils.makeposerrdist(scat, ccat, **kwargs)
    print("detected nonzero connections", dmat.nnz)

    if verbose:
        print("--- nonzero connections in {} seconds ---".format(time.time() - ts))
        ts = time.time()

    uidx, ucts = np.unique(dmat.row, return_counts=True)
    print("unique detections", uidx.size)
    # cumulative offsets: ce[i] gives the [start, end) slice of dmat.col
    # holding the counterparts of the i-th matched source
    cs = np.empty(ucts.size + 1, int)
    cs[0] = 0
    cs[1:] = ucts.cumsum()
    ce = np.lib.stride_tricks.as_strided(cs, (cs.size - 1, 2), cs.strides*2)

    for i, uid in enumerate(uidx):
        src = srcquery.get(id=scat.iloc[uid].id)
        getattr(src, counterpartname).add(*ccat.iloc[dmat.col[ce[i, 0]: ce[i, 1]]].id.values)
        src.save()

    if verbose:
        print("--- main search in {} seconds ---".format(time.time() - ts))
        print("--- %s seconds ---" % (time.time() - start_time))

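# Usage sketch: link sources to cached SIMBAD entries through their 'simbad'
# many-to-many field, pairing within max(40", 3*combined positional error)
# out to 120" (this mirrors the calls made elsewhere in this module):
#   find_counterparts(srcs, Simbad.objects.all(), "simbad", maxdist=120., minrad=40.)
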
def find_erosita_gaia_counterparts(erosources):
    """For the provided eRosita source catalog, connect to Gaia catalog sources
    within 3 position errors, with a floor on the match radius."""
    find_counterparts(erosources, GAIADR2.objects, "gaia", minrad=3, maxdist=20.)

def find_heasarc_counterparts(cat):
    """Search HEASARC counterparts for the sources of the provided catalog.

    Parameters
    ----------
    cat : django.db.models.Model instance
        the catalog containing a set of sources (accessible via source_set);
        the attached sources are assumed to have a many-to-many field which
        connects them to the HEASARC sources loaded in the system

    Returns
    -------
    status : int

    :Author:
        Andrey Semena <san@iki.rssi.ru>
    """
    srcs = cat.source_set.all()
    res = find_heasarc_counterparts_for_srcs(srcs)
    cat.nmatched = cat.source_set.exclude(heasarc=None).count()
    cat.save()
    return res

def find_heasarc_counterparts_for_srcs(srcs):
    """Search HEASARC counterparts for the provided sources.

    Parameters
    ----------
    srcs : QuerySet
        sources of EroTransSource type; they are assumed to have a
        many-to-many field which connects them to the HEASARC sources
        loaded in the system

    Returns
    -------
    status : int

    :Author:
        Andrey Semena <san@iki.rssi.ru>
    """
    for src in srcs:
        src.heasarc.clear()

    ecat = read_frame(srcs)
    ecat["error_radius"] = ecat.radec_error
    uhealpix = np.unique(ecat.healpix_plate.values)
    allhealpix = np.unique(np.concatenate([neighbours(uhealpix, NSIDE_PLATES, order=ORDER).ravel(), uhealpix]))
    heasarccats = HeasarcBase.objects.filter(healpix_plate__in=allhealpix)
    if not heasarccats.exists():
        print('Done')
        return None

    ccat = read_frame(heasarccats)  # cross catalogues
    print("preliminary healpix match", ccat.index.size)

    dmat = correlate_utils.makeposerrdist(ecat, ccat)
    heasarccats = heasarccats.filter(id__in=np.unique(dmat.col))
    print("detected nonzero connections", dmat.nnz)
    uidx, ucts = np.unique(dmat.row, return_counts=True)
    print("unique erosita detections", uidx.size)
    cs = np.empty(ucts.size + 1, int)
    cs[0] = 0
    cs[1:] = ucts.cumsum()
    ce = np.lib.stride_tricks.as_strided(cs, (cs.size - 1, 2), cs.strides*2)

    for i, uid in enumerate(uidx):
        src = srcs.get(id=ecat.iloc[uid].id)
        src.heasarc.add(*ccat.iloc[dmat.col[ce[i, 0]: ce[i, 1]]].id.values)
        print(i, "has matches", ce[i, 1] - ce[i, 0])
        src.save()
    return 1

def correlate_erotrans_with_green(srcs):
    """For erotrans sources, search for nearby supernova remnants from the
    Green SNR catalog."""
    ecat = read_frame(srcs)
    ecat["error_radius"] = ecat.radec_error

    green = read_frame(GreenSNR.objects.all())
    dmatsnr = coo_matrix(correlate_utils.makedistmat3d(ecat, green, 600))
    # keep pairs closer than the SNR major diameter (MajDiam is in arcmin)
    mask = dmatsnr.data < green.iloc[dmatsnr.col].MajDiam.values*60.
    rows, cols = dmatsnr.row[mask], dmatsnr.col[mask]
    urow, uii = np.unique(rows, return_index=True)
    ucols = cols[uii]

    for i, esrcid in enumerate(ecat.iloc[urow].id.values):
        src = srcs.get(id=esrcid)
        src.green_snr_id = green.iloc[ucols[i]].id
        src.save()

def load_erotrans_table(filename):
    """Load a catalog of eRosita sources into the database.

    This procedure accepts a filename in the format 'catalog_OBSID.pkl',
    where OBSID is an 11-digit identifier. If this OBSID is already loaded
    into the DB, the procedure exits; otherwise the catalog is loaded.

    Parameters
    ----------
    filename : str
        Absolute path to the input catalog file

    Returns
    -------
    status : int
        Status code {0: 'Already in DB', ID: 'Object ID or primary key in DB'}

    :Author:
        Andrey Semena <san@cosmos.ru>
    """
    obsid = re.findall(r"\d+", filename)[0]

    logging.debug("Loading %s, ObsID: %s" % (filename, obsid))

    check_repeat = EroTrans.objects.filter(eroday_start=int(obsid[:5])).first()
    if check_repeat and check_repeat.obsid >= obsid:
        print("obsid exists")
        logging.info("ObsID %s is already loaded, skip" % obsid)
        return 0

    logging.info("Loading ObsID %s" % obsid)

    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    hp_plate = HEALPix(nside=NSIDE_PLATES, order=ORDER_PLATES, frame=FK5())

    datafile = filename

    eroday_start = int(obsid[0:5])
    eroday_stop = int(obsid[5:10])
    ts_start = eroday_start*14400  # one eroday is 14400 s
    ts_stop = eroday_stop*14400
    start = mission2date(ts_start)
    stop = mission2date(ts_stop)
    dtime_start = Time(start, format='datetime')
    dtime_stop = Time(stop, format='datetime')

    survey = EroTrans(obsid=obsid,
                      eroday_start=eroday_start,
                      eroday_stop=eroday_stop,
                      start=start,
                      stop=stop,
                      mjd_start=dtime_start.mjd,
                      mjd_stop=dtime_stop.mjd)
    survey.save()

    if check_repeat and check_repeat.obsid < obsid:
        print("removing older version of the catalog")
        logging.info("remove older version of the catalog: %s" % obsid)
        check_repeat.delete()

    df = pd.read_pickle(datafile)
    df.drop(columns=["tilenum", 'srcname', 'hpidx', "TSTART", "ID_SRC",
                     "TSTOP", "DIST_NN", "ID_CLUSTER", 'SRCDENS'], inplace=True, errors="ignore")
    df.rename(columns={"RA": "ra", "DEC": "dec", "RADEC_ERR": "radec_error",
                       "srcID": "srcid", "ML_EXP_1": 'APE_EXP_1'}, inplace=True)
    df["flux"] = df.ML_FLUX_0.values*0.8911e-12  # ML_FLUX_0 to flux conversion
    sc = SkyCoord(df.ra.values, df.dec.values, unit="deg", frame="fk5")
    df["lii"] = sc.galactic.l.value
    df["bii"] = sc.galactic.b.value
    print(df.index.size)
    sc = sc[df.lii.values < 180.]
    df = df[df.lii.values < 180.]
    print(df.index.size)
    df["healpix"] = hp.skycoord_to_healpix(sc)
    df["name"] = [make_source_name("IKIe", row.ra, row.dec) for row in df[["ra", "dec"]].itertuples()]
    df["healpix_plate"] = hp_plate.skycoord_to_healpix(sc)

    srclist = [EroTransSource(catalog=survey, **src) for idx, src in df.iterrows()]
    EroTransSource.objects.bulk_create(srclist, ignore_conflicts=True)

    survey.nrows = df.index.size
    survey.source_set = survey.erotranssource_set
    find_counterparts(survey.erotranssource_set.all(), HeasarcBase.objects.all(), "heasarc", minrad=20)
    find_counterparts(survey.erotranssource_set.all(), Flesch.objects.all(), "flesch", minrad=3, maxdist=20.)
    find_counterparts(survey.erotranssource_set.all(), SDSSDR12Spec.objects.all(), "sdss_spec", minrad=3, maxdist=20.)
    find_counterparts(survey.erotranssource_set.all(), GAIADR2.objects.all(), "gaia", minrad=3, maxdist=20.)
    correlate_erotrans_with_green(survey.erotranssource_set.all())
    survey.nmatched = survey.erotranssource_set.exclude(heasarc=None).count()
    check_catsources_transient(survey)
    survey.ntransients = survey.erotranssource_set.filter(Q(transient=True) | Q(rosat_transient=True)).count()
    survey.save()
    load_simbad_sources(survey.erotranssource_set.all())
    return survey.pk

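# Usage sketch (hypothetical path; the 11-digit OBSID is parsed from the
# first run of digits in the file name):
#   load_erotrans_table("/data/erosita/catalog_01900192000.pkl")
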
def load_erotrans_table_test(filename):
    """Test loader.

    Parameters
    ----------
    filename : str
        Absolute path to the input catalog file

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    obsid = re.findall(r"\d+", filename)[0]

    if EroTrans.objects.filter(obsid=obsid).exists():
        logging.warning("ObsID %s is already loaded" % obsid)
        return None
    logging.warning("ObsID %s loading to DB" % obsid)

def quat_to_pol_and_roll(qfin):
    """Convert attitude quaternion(s) to pointing (RA, Dec) and roll angle.

    The quaternion is assumed to act on the satellite coordinate system in
    order to orient it in ICRS coordinates: opaxis defines the direction of
    the main axis (the x axis [1, 0, 0] coincides with the optical axis),
    and north is assumed to be oriented along the z coordinate, which we
    also call north for the detectors.
    """
    opaxis = [1, 0, 0]
    north = [0, 0, 1]
    opticaxis = qfin.apply(opaxis)
    # '...' indexing and axis=-1 keep this working for both a single
    # rotation and a stack of rotations
    dec = np.arctan(opticaxis[..., 2]/np.sqrt(opticaxis[..., 1]**2 + opticaxis[..., 0]**2))
    ra = np.arctan2(opticaxis[..., 1], opticaxis[..., 0]) % (2.*pi)

    yzprojection = np.cross(opticaxis, north)
    vort = np.cross(north, opaxis)

    rollangle = np.arctan2(np.sum(yzprojection*qfin.apply(vort), axis=-1),
                           np.sum(yzprojection*qfin.apply(north), axis=-1))
    return ra, dec, rollangle

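# Usage sketch (assumes scipy Rotation quaternions in [x, y, z, w] order):
#   q = Rotation.from_quat([[0., 0., 0., 1.]])  # identity attitude
#   ra, dec, roll = quat_to_pol_and_roll(q)     # ra = dec = 0, angles in radians
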
def slerp(v0, v1, t_array):
    """Spherical linear interpolation between two quaternions.

    >>> slerp([1, 0, 0, 0], [0, 0, 0, 1], np.arange(0, 1, 0.001))
    """
    t_array = np.array(t_array)
    v0 = np.array(v0)
    v1 = np.array(v1)
    dot = np.sum(v0 * v1)

    # take the short way around: flip one quaternion if the dot product is negative
    if dot < 0.0:
        v1 = -v1
        dot = -dot

    # for nearly parallel quaternions fall back to normalized linear interpolation
    DOT_THRESHOLD = 0.9995
    if dot > DOT_THRESHOLD:
        result = v0[np.newaxis, :] + t_array[:, np.newaxis] * (v1 - v0)[np.newaxis, :]
        return (result.T / np.linalg.norm(result, axis=1)).T

    theta_0 = np.arccos(dot)
    sin_theta_0 = np.sin(theta_0)

    theta = theta_0 * t_array
    sin_theta = np.sin(theta)

    s0 = np.cos(theta) - dot * sin_theta / sin_theta_0
    s1 = sin_theta / sin_theta_0
    return (s0[:, np.newaxis] * v0[np.newaxis, :]) + (s1[:, np.newaxis] * v1[np.newaxis, :])

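# Example: slerp([1, 0, 0, 0], [0, 0, 0, 1], [0.0, 0.5, 1.0]) returns a (3, 4)
# array of unit quaternions; the endpoints (t = 0 and t = 1) reproduce the inputs.
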
def load_srg_data_dumps(dir):
    """Load data dumps stored in plates.

    The data dump directory name has the format srg_20191228_143827_000.

    Parameters
    ----------
    dir : str
        Absolute path to the input directory

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    logging.info("Loading data dumps from %s" % dir)
    format_string = 'srg_%Y%m%d_%H%M%S'
    for path in glob.glob("%s/??????" % (dir)):
        plate = os.path.basename(path)

        try:
            skymap = SkyMaps.objects.get(SMAPNR=int(plate))
        except SkyMaps.DoesNotExist:
            logging.error("SkyMap %s is not found" % plate)
            continue  # skymap would otherwise be undefined below
        for l3 in glob.glob("%s/L3/srg_????????_??????_000" % path):
            name = os.path.basename(l3)
            try:
                dump = SrgDataDump.objects.get(name=name)
                queryset = skymap.srgdatadump_set.all()
                if not queryset.filter(pk=dump.pk).exists():
                    print("Adding {} to {}".format(skymap, dump))
                    dump.skymap.add(skymap)
                    dump.save()
                continue
            except SrgDataDump.DoesNotExist:
                pass

            logging.info("Creating {} in SkyMap {}".format(name, skymap))

            dtime = datetime.strptime(name[:-4], format_string)
            tm = Time(dtime, format='datetime', scale='utc')
            dt = tm.to_datetime(timezone=TZ_MSK)

            dump = SrgDataDump(name=name, date=dt, version=int(name[-3:]))
            dump.catalog_loaded = False
            dump.save()
            dump.skymap.add(skymap)
            dump.save()

def load_skymap_sources_file(filename):
    """Load sources stored in plates.

    The data dump directory name has the format srg_20191228_143827_000.

    Parameters
    ----------
    filename : str
        Absolute path to the file

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    if not os.path.isfile(filename):
        print("File {} does not exist".format(filename))
        return

    logging.debug("Loading sources from {}".format(filename))

    name = os.path.basename(filename)
    fits_name = os.path.splitext(name)[0]
    dump_name = os.path.splitext(fits_name)[0]  # without .gz

    # Check whether this data dump exists
    try:
        dump = SrgDataDump.objects.get(name=dump_name)
        if dump.catalog_loaded:
            logging.debug("Source catalog for {} is loaded, skip".format(dump))
            print("Source catalog for {} is loaded, skip".format(dump))
            return
        else:
            logging.debug("Clear source catalog for {}".format(dump))
            srcs = dump.skymapsource_set.all()
            srcs.delete()
    except SrgDataDump.DoesNotExist:
        logging.debug("Create new data dump {}".format(dump_name))
        format_string = 'srg_%Y%m%d_%H%M%S'
        dtime = datetime.strptime(dump_name[:-4], format_string)
        tm = Time(dtime, format='datetime', scale='utc')
        dt = tm.to_datetime(timezone=TZ_MSK)
        dump = SrgDataDump(name=dump_name, date=dt)
        dump.save()

    logging.debug("Read source list from {} and attach it to {}".format(filename, dump))
    print("Read source list from {} and attach it to {}".format(filename, dump))
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    hp_plate = HEALPix(nside=NSIDE_PLATES, order=ORDER_PLATES, frame=FK5())

    ext = 1
    hdul = fits.open(filename)  # open a FITS file
    hdr = hdul[ext].header
    data = hdul[ext].data

    for item in data:
        plate = None
        skymap = None
        try:
            plate = int(item['FIELD'])
            skymap = SkyMaps.objects.get(SMAPNR=plate)
        except Exception:
            logging.info("SkyMap {} is not found".format(plate))
        ra = float(item['RA'])
        dec = float(item['DEC'])
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        healpix = hp.skycoord_to_healpix(crd)
        healpix_plate = hp_plate.skycoord_to_healpix(crd)
        src = SkyMapSource(skymap=skymap, dump=dump, healpix=healpix,
                           healpix_plate=healpix_plate,
                           name=make_source_name('SRGA', ra, dec),
                           name_orig=item['NAME'],
                           lii=crd.galactic.l.value,
                           bii=crd.galactic.b.value,
                           ra=ra, dec=dec,
                           x=item['x'],
                           y=item['y'],
                           cnts=item['cnts'],
                           cnts_err=item['cnts_err'],
                           cnts_bg=item['cnts_bg'],
                           exptime=item['exptime'],
                           rate=item['rate'],
                           rate_err=item['rate_err'],
                           flux=item['flux'],
                           flux_err=item['flux_err'],
                           sig=item['sig'],
                           nfalse=item['nfalse'])
        src.save()

    dump.catalog_loaded = True
    dump.save()

    srcs = dump.skymapsource_set.all()

    # clean_skymap_sources(srcs)
    find_counterparts(srcs, HeasarcBase.objects.all(), 'heasarc', minrad=40)
    load_simbad_for_skymap_sources(srcs, minrad=40, maxdist=120)
    # find_counterparts(srcs, GAIADR2.objects.all(), "gaia", maxdist=60., minrad=30.)
    # mark_new_skymap_sources_in_latest()

def load_skymap_sources_for_artsurvey(filename, name, force=False):
    """Load sources stored in plates.

    The data dump directory name has the format srg_20191228_143827_000.

    Parameters
    ----------
    filename : str
        Absolute path to the file
    name : str
        Name of the data dump to create or update
    force : bool
        Reload the catalog even if it is already marked as loaded

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    if not os.path.isfile(filename):
        print("File {} does not exist".format(filename))
        return

    logging.debug("Loading sources from {}".format(filename))
    print("Loading sources from {}".format(filename))
    print(name)

    # Check whether this data dump exists
    try:
        dump = SrgDataDump.objects.get(name=name)
        if force:
            dump.catalog_loaded = False
            dump.save()
        if dump.catalog_loaded:
            print("Source catalog for {} is loaded, skip".format(dump))
            return
        else:
            print("Clear source catalog for {}".format(dump))
            srcs = dump.skymapsource_set.all()
            srcs.delete()
    except SrgDataDump.DoesNotExist:
        print("Create new data dump {}".format(name))
        dt = localtime()
        dump = SrgDataDump(name=name, date=dt)
        dump.save()

    print("Read source list from {} and attach it to {}".format(filename, dump))
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    hp_plate = HEALPix(nside=NSIDE_PLATES, order=ORDER_PLATES, frame=FK5())

    ext = 1
    hdul = fits.open(filename)  # open a FITS file
    hdr = hdul[ext].header
    data = hdul[ext].data

    for item in hdul[ext].columns:
        print(item)

    def opt(item, key):
        """Return item[key], or None if the column is absent."""
        try:
            return item[key]
        except KeyError:
            return None

    # optional columns that may be absent depending on the catalog version:
    # model attribute -> FITS column
    optional_columns = {
        'cnts_err': 'ml_cnts_err',
        'cnts_bg': 'cnts_bg',
        'cnts_tot': 'cnts_tot',
        'ext_id': 'ext_id',
        'rate': 'rate',
        'rate_err': 'rateerr_up',
        'flux_err': 'fluxerr_up',  # obsolete
        'fluxerr_lo': 'fluxerr_lo',
        'fluxerr_up': 'fluxerr_up',
        'flux_uplim': 'flux_uplim',
        'cnts0': 'cnts0', 'cnts_er0': 'cntserr0_up', 'cnts_bg0': 'cnts_bg0',
        'cnts_t0': 'cnts_t0', 'exptime0': 'exptime0', 'flux0': 'flux0', 'flux_er0': 'fluxerr0_up',
        'cnts1': 'cnts1', 'cnts_er1': 'cntserr1_up', 'cnts_bg1': 'cnts_bg1',
        'cnts_t1': 'cnts_t1', 'exptime1': 'exptime1', 'flux1': 'flux1', 'flux_er1': 'fluxerr1_up',
        'cnts2': 'cnts2', 'cnts_er2': 'cntserr2_up', 'cnts_bg2': 'cnts_bg2',
        'cnts_t2': 'cnts_t2', 'exptime2': 'exptime2', 'flux2': 'flux2', 'flux_er2': 'fluxerr2_up',
        'cnts3': 'cnts3', 'cnts_er3': 'cntserr3_up', 'cnts_bg3': 'cnts_bg3',
        'cnts_t3': 'cnts_t3', 'exptime3': 'exptime3', 'flux3': 'flux3', 'flux_er3': 'fluxerr3_up',
        'cnts4': 'cnts4', 'cnts_er4': 'cntserr4_up', 'cnts_bg4': 'cnts_bg4',
        'cnts_t4': 'cnts_t4', 'exptime4': 'exptime4', 'flux4': 'flux4', 'flux_er4': 'fluxerr4_up',
        'sig': 'sig',
        'nfalse': 'nfalse',
        'ml_sig': 'ml_sig',
        'ml_detlike': 'ml_detlike',
        'ml_nfalse': 'ml_nfalse',
        'ml_ra': 'ml_ra',
        'ml_dec': 'ml_dec',
        'ml_radec_err_90': 'radec_err_90',
        'ml_radec_err_98': 'radec_err_98',
        'ml_flux': 'ml_flux',
        'ml_flux_err': 'ml_flux_err',
        'ml_exp': 'ml_exp',
    }

    for item in data:
        skymap = None
        try:
            plate = int(item['FIELD'])
            skymap = SkyMaps.objects.get(SMAPNR=plate)
        except Exception:
            logging.info("SkyMap plate is not found")
            plate = 0

        ra = float(item['RA'])
        dec = float(item['DEC'])
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        healpix = hp.skycoord_to_healpix(crd)
        healpix_plate = hp_plate.skycoord_to_healpix(crd)

        # find the nearest SkyMap if FIELD is not present
        if not skymap:
            try:
                my_hp_plate = SurveyHealpixPlate.objects.get(healpix=healpix_plate)
                skymaps = my_hp_plate.skymaps_set.all()
                sep_min = 10000.0
                for smap in skymaps:
                    smap_crd = SkyCoord(smap.ra, smap.dec, frame=FK5(), unit="deg")
                    sep = crd.separation(smap_crd).arcmin
                    if sep < sep_min:
                        sep_min = sep
                        skymap = smap
            except Exception:
                pass

        print("Add {}".format(make_source_name('SRGA', ra, dec)))
        src = SkyMapSource(skymap=skymap, dump=dump, healpix=healpix,
                           healpix_plate=healpix_plate,
                           name=make_source_name('SRGA', ra, dec),
                           name_orig=item['NAME'])
        src.lii = crd.galactic.l.value
        src.bii = crd.galactic.b.value
        src.ra = ra
        src.dec = dec
        src.x = item['x']
        src.y = item['y']
        src.cnts = item['ml_cnts']
        src.exptime = item['exptime']
        src.flux = item['flux']

        for attr, key in optional_columns.items():
            setattr(src, attr, opt(item, key))

        # columns that need a type conversion when present
        ext_val = opt(item, 'ext')
        src.ext = int(ext_val) if ext_val is not None else None
        detlike = opt(item, 'detlike')  # /2.30259 Andy's catalog fix
        src.detlike = float(detlike) if detlike is not None else None

        src.save()

    dump.catalog_loaded = True
    dump.save()

    srcs = dump.skymapsource_set.all()
    print(srcs.count())

def update_skymap_sources_for_artsurvey(filename, name):
    """ Update sources with external identifiers (EXT_ID) from an ART-XC
    survey catalog file.

    The format of the data dump directory is srg_20191228_143827_000

    Parameters
    ----------
    filename : str
        Absolute path to the file
    name : str
        Name of the data dump the sources belong to

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """

    if not os.path.isfile(filename):
        print("File {} does not exist".format(filename))
        return

    logging.debug("Loading sources from {}".format(filename))
    print("Loading sources from {}".format(filename))

    #name = os.path.basename(filename)
    #fits_name = os.path.splitext(name)[0]
    #""" without .gz """
    #dump_name = os.path.splitext(fits_name)[0]

    print(name)

    """ Check whether this data dump exists """
    try:
        dump = SrgDataDump.objects.get(name=name)
        print("Use {}".format(dump))
    except SrgDataDump.DoesNotExist:
        print("Not found {}".format(name))
        return

    print("Read source list from {} and attach it to {}".format(filename, dump))

    ext = 1
    hdul = fits.open(filename)  # open a FITS file
    hdr = hdul[ext].header
    data = hdul[ext].data

    """
    for item in hdul[ext].columns:
        print(item)
    """

    for item in data:
        try:
            plate = int(item['FIELD'])
            skymap = SkyMaps.objects.get(SMAPNR=plate)
        except Exception:
            logging.info("SkyMap plate is not found")
            plate = 0
            skymap = None

        """ find source by name_orig """
        name_orig = item['NAME']
        try:
            src = SkyMapSource.objects.get(name_orig__exact=name_orig)
        except Exception:
            continue
        ext_id = item['EXT_ID']
        if not ext_id == '':
            src.ext_id = ext_id
            src.save()
            print("{} --> {}".format(src, ext_id))


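# Usage sketch for the updater above (illustrative, not part of the original
# pipeline): apply it to every catalog file in a directory, deriving the dump
# name from the file name with the same srg_YYYYMMDD_HHMMSS_NNN convention
# used throughout this module.
def _update_artsurvey_dir_sketch(dir):
    files = glob.glob("{}/srg_????????_??????_???.fits.gz".format(dir))
    files.sort(key=os.path.getmtime)
    for filename in files:
        # strip both .gz and .fits to recover the dump name
        name = os.path.splitext(os.path.splitext(os.path.basename(filename))[0])[0]
        update_skymap_sources_for_artsurvey(filename, name)

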
def load_skymap_sources_erosita(filename):
    """ Loads sources cross-matched with eRosita

    Parameters
    ----------
    filename : str
        Absolute path to the file

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    logger = logging.getLogger('django')

    if not os.path.isfile(filename):
        logger.error("File {} does not exist".format(filename))
        return

    logger.info("Loading sources from {}".format(filename))

    name = os.path.basename(filename)
    catalog_name = os.path.splitext(name)[0]
    logger.info("Filename {} catalog {}".format(name, catalog_name))

    """ Check whether this catalog exists """
    try:
        catalog = ArtCat.objects.get(name=catalog_name)
        logger.info("Catalog {} is found".format(catalog))
        catalog.delete()
    except ArtCat.DoesNotExist:
        pass

    logger.info("Create catalog {}".format(catalog_name))

    format_string = 'srg_%Y%m%d_%H%M%S'
    dtime = datetime.strptime(catalog_name[:-4], format_string)
    tm = Time(dtime, format='datetime', scale='utc')
    dt = tm.to_datetime(timezone=TZ_MSK)  # parsed date, currently unused

    catalog = ArtCat(name=catalog_name)
    catalog.save()

    data = astropy.table.Table.read(filename, format='ascii.csv')
    print(data.info)

    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    hp_plate = HEALPix(nside=NSIDE_PLATES,
                       order=ORDER_PLATES,
                       frame=FK5())

    for item in data:

        try:
            plate = int(item['FIELD'])
            skymap = SkyMaps.objects.get(SMAPNR=plate)
        except Exception:
            logging.info("SkyMap plate is not found")
            skymap = None

        ra = float(item['RA'])
        dec = float(item['DEC'])
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        healpix = hp.skycoord_to_healpix(crd)
        healpix_plate = hp_plate.skycoord_to_healpix(crd)
        src = SkyMapSource(skymap=skymap, catalog=catalog, healpix=healpix,
                           healpix_plate=healpix_plate,
                           name=make_source_name('SRGA', ra, dec),
                           name_orig=item['NAME'],
                           lii=crd.galactic.l.value,
                           bii=crd.galactic.b.value,
                           ra=ra, dec=dec,
                           x=item['X'],
                           y=item['Y'],
                           cnts=item['CNTS'],
                           cnts_err=item['CNTS_ERR'],
                           cnts_bg=item['CNTS_BG'],
                           exptime=item['EXPTIME'],
                           rate=item['RATE'],
                           rate_err=item['RATE_ERR'],
                           flux=item['FLUX'],
                           flux_err=item['FLUX_ERR'],
                           sig=item['SIG'],
                           nfalse=item['NFALSE'],)
        src.save()

        if not (item['s_match'] == 'True'):
            continue

        ero = eRositaMatch(source=src,
                           exp=item['exp'],
                           sep=item['s_sep'],
                           ra=item['s_ra'],
                           dec=item['s_dec'],
                           lkh=item['s_lkh'],
                           cts=item['s_cts'],
                           flux=item['s_fx'])
        ero.save()

    srcs = catalog.skymapsource_set.all()
    ##clean_skymap_sources(srcs)
    find_counterparts(srcs, HeasarcBase.objects.all(), 'heasarc', minrad=30)
    load_simbad_sources(srcs, minrad=30, maxdist=120)
    find_counterparts(srcs, GAIADR2.objects.all(), "gaia", maxdist=60., minrad=30.)
    ##mark_new_skymap_sources_in_latest()


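# make_source_name('SRGA', ra, dec) used above is defined elsewhere in this
# module.  A sketch of what such an IAU-style name builder typically looks
# like (illustrative only, not the authoritative implementation):
def _make_source_name_sketch(prefix, ra, dec):
    crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
    return "{} J{}{}".format(
        prefix,
        crd.ra.to_string(unit=u.hourangle, sep='', precision=1, pad=True),
        crd.dec.to_string(sep='', precision=0, alwayssign=True, pad=True))

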
def clean_skymap_sources(srcs, radius=36, threshold=10):
    """ Marks sources as bad

    A source with at least `threshold` neighbours within `radius` arcmin
    is kept as good; its neighbours are marked bad.

    Actually not used. TODO: preselection by healpix_plate

    Parameters
    ----------
    srcs :
        Sorted list of sources
    radius : float
        Neighbour search radius, arcmin
    threshold : int
        Minimum number of neighbours that triggers the cleaning

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    logging.info("Cleaning skymap sources")

    """
    for s in srcs:
        s.bad = False
        s.good = False
        s.save()
    return
    """

    for s in srcs:

        if(s.bad == True):
            continue
        crd = SkyCoord(s.ra, s.dec, frame="fk5", unit="deg")
        count = 0
        for s2 in srcs:
            if(s == s2 or s2.bad == True):
                continue

            crd2 = SkyCoord(s2.ra, s2.dec, frame="fk5", unit="deg")
            s2.sep = crd.separation(crd2).arcmin
            if(s2.sep <= radius):
                count = count + 1
        if(count >= threshold):
            for s2 in srcs:
                if(s == s2):
                    continue
                if(s2.sep <= radius and s2.good != True):
                    s2.bad = True
                    s2.save()
            s.good = True
            s.save()

            logging.debug("{}: Rate {} SkyMap: {} Dump: {} {}".format(s, s.rate, s.skymap, s.dump, count))


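# A vectorized alternative sketch (illustrative, not used by the pipeline)
# for the O(N^2) neighbour counting in clean_skymap_sources() above;
# `srcs` is assumed to be any sequence of objects with ra/dec in degrees.
def _count_neighbours_sketch(srcs, radius=36):
    crd = SkyCoord([s.ra for s in srcs], [s.dec for s in srcs],
                   frame="fk5", unit="deg")
    idx1, idx2, sep2d, dist3d = crd.search_around_sky(crd, seplimit=radius*u.arcmin)
    mask = idx1 != idx2  # drop self-matches
    return np.bincount(idx1[mask], minlength=len(crd))

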
def load_skymap_sources_dir(dir):
    """ Loads sources stored in plates

    The format of the data dump directory is srg_20191228_143827_000

    Parameters
    ----------
    dir : str
        Absolute path to the input directory

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """
    logging.info("Loading sources from %s" % dir)
    files = glob.glob("{}/srg_????????_??????_000.fits.gz".format(dir))
    files.sort(key=os.path.getmtime)
    for filename in files:
        print(filename)
        load_skymap_sources_file(filename)


def update_all_skymap_sources_counterparts():
    skymaps = SkyMaps.objects.all()
    for cat in skymaps:
        ntot = cat.skymapsource_set.count()
        if (ntot):
            find_counterparts(cat.skymapsource_set.all(), HeasarcBase.objects.all(), 'heasarc', minrad=30)
            load_simbad_sources(cat.skymapsource_set.all())
            find_counterparts(cat.skymapsource_set.all(), GAIADR2.objects.all(), "gaia", maxdist=20., minrad=3.)


def update_allsky_missed():
    """ Select sources missed in published all-sky surveys """
    logging.info("Update All-sky missed")
    missed = SelectAllskyMissed.objects.all()
    missed.delete()

    #dump = SrgDataDump.objects.all().order_by('-date')[0]
    dump = SrgDataDump.objects.filter(catalog_loaded=True).latest('date')
    srcs = dump.skymapsource_set.all()
    for src in srcs:
        if (src.heasarc.filter(heasarcswiftbat105m__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarcxteasscat__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarcintrefcat__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarcrass2rxs__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarcmaxigschgl__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarcxmmsl2__isnull=False).count()):
            continue
        if (src.heasarc.filter(heasarc3maxi__isnull=False).count()):
            continue
        obj = SelectAllskyMissed(dump=dump, source=src)
        obj.save()


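# A single-query alternative sketch (illustrative only) for the chain of
# per-survey checks in update_allsky_missed() above, using the Q objects
# already imported at the top of this module:
def _in_published_allsky_sketch(src):
    """True if any heasarc counterpart of `src` belongs to one of the
    published all-sky surveys checked above."""
    q = (Q(heasarcswiftbat105m__isnull=False) |
         Q(heasarcxteasscat__isnull=False) |
         Q(heasarcintrefcat__isnull=False) |
         Q(heasarcrass2rxs__isnull=False) |
         Q(heasarcmaxigschgl__isnull=False) |
         Q(heasarcxmmsl2__isnull=False) |
         Q(heasarc3maxi__isnull=False))
    return src.heasarc.filter(q).exists()

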
def mark_new_skymap_sources_in_latest(tolerance=10):

    if(SrgDataDump.objects.all().count() < 10):
        print('mark_new_skymap_sources_in_latest: Too few dumps, skip')
        return

    dumps = SrgDataDump.objects.all().order_by('-date')[:10]
    latest = dumps[0]

    # find the most recent earlier dump that actually has sources
    second = None
    for i in range(1, len(dumps)):
        if(dumps[i].skymapsource_set.count() > 0):
            second = dumps[i]
            break

    if second is None:
        return

    skymaps_latest = latest.skymap.all()
    sources_latest = latest.skymapsource_set.filter(skymap__in=skymaps_latest)
    logging.debug("Latest {} nsrc={} in SkyMap {}".format(latest, latest.skymapsource_set.count(), sources_latest.count()))

    sources_second = second.skymapsource_set.filter(skymap__in=skymaps_latest)
    logging.debug("Second {} nsrc={} in SkyMap {}".format(second, second.skymapsource_set.count(), sources_second.count()))

    for src1 in sources_latest:
        crd1 = SkyCoord(src1.ra, src1.dec, frame="fk5", unit="deg")
        matched = False
        for src2 in sources_second:
            sep = crd1.separation(SkyCoord(src2.ra, src2.dec, frame="fk5", unit="deg"))
            if(sep.arcsecond < tolerance):
                matched = True
                break
        if matched:
            continue
        logging.debug("{} new".format(src1))
        src1.new = True
        src1.save()

    """ print some statistics """
    logging.debug("Latest {}: {} marked as new within {} arcseconds".format(latest, sources_latest.filter(new=True).count(), tolerance))


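# A vectorized matching sketch (illustrative, not used above) for the double
# loop in mark_new_skymap_sources_in_latest(), based on astropy's
# match_to_catalog_sky(); both inputs are assumed to be non-empty.
def _is_new_mask_sketch(sources_latest, sources_second, tolerance=10):
    c1 = SkyCoord([s.ra for s in sources_latest],
                  [s.dec for s in sources_latest], frame="fk5", unit="deg")
    c2 = SkyCoord([s.ra for s in sources_second],
                  [s.dec for s in sources_second], frame="fk5", unit="deg")
    idx, sep2d, dist3d = c1.match_to_catalog_sky(c2)
    return sep2d.arcsecond >= tolerance  # True where no counterpart is close

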
def transfer_data(child, clean_ads=True, clean_oname=True):

    if(child.survey.archived):
        print("ERROR: Survey {} is archived ({})".format(child.survey, child))
        return

    parent = child.parent

    print("--> Transfer data from parent {} to {}".format(parent, child))

    """ Clean ADS and OtherName in child """
    if(clean_ads):
        child.ads.clear()

    if(clean_oname):
        for o in child.othername_set.all():
            print("Delete {} in {}".format(o, child))
            o.delete()
        child.save()

    """ fill child with data """
    child.owner = parent.owner
    child.notes = parent.notes
    child.notes_paper = parent.notes_paper
    child.follow_up = parent.follow_up
    child.redshift = parent.redshift
    child.class_id = parent.class_id
    child.object_class = parent.object_class
    child.cname = parent.cname
    child.refid = parent.refid
    child.erosita_data = parent.erosita_data
    child.class_tentative = parent.class_tentative
    child.category = parent.category
    child.turkish = parent.turkish
    child.turkish_date = parent.turkish_date

    for upload in parent.uploads.all():
        upload.artsources.add(child)

    for a in parent.ads.all():
        child.ads.add(a)

    """ clone OtherName """
    for o in parent.othername_set.all():
        o.source = child
        o.pk = None
        o.save()

    if(parent.allwise_primary):
        child.allwise_primary = parent.allwise_primary

    if(parent.gaia3_primary):
        child.gaia3_primary = parent.gaia3_primary

    if(parent.nvss_primary):
        child.nvss_primary = parent.nvss_primary

    if(parent.first_primary):
        child.first_primary = parent.first_primary

    child.save()


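# transfer_data() clones OtherName rows with Django's pk-reset idiom;
# a minimal standalone sketch of that idiom (argument names as in the loop
# above):
def _clone_othername_sketch(o, child):
    """Re-point a fetched OtherName at `child` and save it as a new row:
    with pk set to None, save() performs an INSERT instead of an UPDATE."""
    o.source = child
    o.pk = None
    o.save()
    return o

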
def transfer(srcs1, srcs2, maxdist=120.0, minrad=40.0):
    match_count = 0
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    for s1 in srcs1:
        crd1 = SkyCoord(s1.ra, s1.dec, frame="fk5", unit="deg")

        # use the source extent as the matching radius, but never less
        # than minrad
        if(s1.ext):
            if(s1.ext > minrad):
                radec_error = float(s1.ext)
            else:
                radec_error = minrad
        else:
            radec_error = minrad

        heal = hp.cone_search_skycoord(crd1, radius=maxdist*u.arcsecond)
        try:
            sel2 = srcs2.filter(healpix__in=heal)
        except Exception:
            continue

        tot = 0
        child = None
        childs = []
        childs_sep = []
        for s2 in sel2:
            crd2 = SkyCoord(s2.ra, s2.dec, frame="fk5", unit="deg")
            sep = crd1.separation(crd2).arcsecond
            if(sep <= radec_error):
                childs.append(s2)
                childs_sep.append(sep)
                tot = tot + 1
        if(tot == 0):
            continue
        if(tot == 1):
            child = childs[0]
        if(tot > 1):
            print("More than one child found for {}, please select:".format(s1))

            for idx, child in enumerate(childs):
                print("{} {} at {:.2f} arcsec".format(idx, childs[idx], childs_sep[idx]))
            val = input("Enter your value: ")
            print("You entered {}".format(int(val)))
            child = childs[int(val)]

        child.parent = s1
        child.save()
        print("Transfer data from {} to {}".format(s1, child))
        transfer_data(child)

        match_count = match_count + 1

    return(match_count)


def crossmatch_count(srcs1, srcs2, maxdist=120.0, minrad=40.0):
    match_count = 0
    match1_count = 0
    match2_count = 0
    match3_count = 0
    match4_count = 0
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    for s1 in srcs1:
        crd1 = SkyCoord(s1.ra, s1.dec, frame="fk5", unit="deg")
        heal = hp.cone_search_skycoord(crd1, radius=maxdist*u.arcsecond)
        try:
            sel2 = srcs2.filter(healpix__in=heal)
        except Exception:
            continue

        tot = 0
        for s2 in sel2:
            crd2 = SkyCoord(s2.ra, s2.dec, frame="fk5", unit="deg")
            sep = crd1.separation(crd2).arcsecond
            if(sep <= minrad):
                match_count = match_count + 1
                tot = tot + 1
        if(tot == 1):
            match1_count = match1_count + 1
        if(tot == 2):
            match2_count = match2_count + 1
        if(tot == 3):
            match3_count = match3_count + 1
        if(tot == 4):
            match4_count = match4_count + 1
    logging.debug("Confusion 1:{} 2:{} 3:{} 4:{}".format(match1_count, match2_count, match3_count, match4_count))
    return(match_count)


def crossmatch(srcs1, srcs2, maxdist=120.0, minrad=40.0, ratio_min=0.001, ratio_max=1000.0):
    match_count = 0
    match1_count = 0
    match2_count = 0
    match3_count = 0
    match4_count = 0
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())
    match_list = []

    missed_count1 = 0
    missed_list1 = []
    for s1 in srcs1:
        crd1 = SkyCoord(s1.ra, s1.dec, frame="fk5", unit="deg")
        heal = hp.cone_search_skycoord(crd1, radius=maxdist*u.arcsecond)
        try:
            sel2 = srcs2.filter(healpix__in=heal)
        except Exception:
            continue

        tot = 0
        for s2 in sel2:
            crd2 = SkyCoord(s2.ra, s2.dec, frame="fk5", unit="deg")
            sep = crd1.separation(crd2).arcsecond
            if(sep <= minrad):
                if not (np.isnan(s1.flux) or s1.flux <= 0.0):
                    flux_ratio = s2.flux/s1.flux
                else:
                    flux_ratio = -100
                if(flux_ratio > ratio_min and flux_ratio < ratio_max):
                    match_list.append({
                        'dir': "2 in 1",
                        'src1': s1,
                        'src2': s2,
                        'sep': sep,
                        'f1f2_ratio': flux_ratio,
                    })
                    if(tot == 0):
                        match_count = match_count + 1
                    tot = tot + 1
        if(tot == 0):
            missed_list1.append({'src1': s1, })
            missed_count1 = missed_count1 + 1

        if(tot == 1):
            match1_count = match1_count + 1
        if(tot == 2):
            match2_count = match2_count + 1
        if(tot == 3):
            match3_count = match3_count + 1
        if(tot == 4):
            match4_count = match4_count + 1

    match_count2 = 0
    match1_count2 = 0
    match2_count2 = 0
    match3_count2 = 0
    match4_count2 = 0
    # kept for symmetry; reverse matches go into match_list with dir="1 in 2"
    match_list2 = []
    missed_list2 = []
    missed_count2 = 0
    for s2 in srcs2:
        crd2 = SkyCoord(s2.ra, s2.dec, frame="fk5", unit="deg")
        heal = hp.cone_search_skycoord(crd2, radius=maxdist*u.arcsecond)
        try:
            sel1 = srcs1.filter(healpix__in=heal)
        except Exception:
            continue

        tot = 0
        for s1 in sel1:
            crd1 = SkyCoord(s1.ra, s1.dec, frame="fk5", unit="deg")
            sep = crd2.separation(crd1).arcsecond

            if(sep <= minrad):
                if not (np.isnan(s1.flux) or s1.flux <= 0.0):
                    flux_ratio = s2.flux/s1.flux
                else:
                    flux_ratio = -100

                if(flux_ratio > ratio_min and flux_ratio < ratio_max):
                    match_list.append({
                        'dir': "1 in 2",
                        'src1': s1,
                        'src2': s2,
                        'sep': sep,
                        'f1f2_ratio': flux_ratio,
                    })
                    # unlike the forward pass, every match is counted here
                    #if(tot == 0):
                    match_count2 = match_count2 + 1
                    tot = tot + 1
        if(tot == 0):
            missed_list2.append({'src2': s2, })
            missed_count2 = missed_count2 + 1

        if(tot == 1):
            match1_count2 = match1_count2 + 1
        if(tot == 2):
            match2_count2 = match2_count2 + 1
        if(tot == 3):
            match3_count2 = match3_count2 + 1
        if(tot == 4):
            match4_count2 = match4_count2 + 1

    logging.debug("Confusion 1:{} 2:{} 3:{} 4:{}".format(match1_count, match2_count, match3_count, match4_count))
    return match_count, match1_count, match2_count, match3_count, match4_count, match_list, missed_count1, missed_list1, missed_count2, missed_list2, match_count2, match1_count2, match2_count2, match3_count2, match4_count2


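# Usage sketch for crossmatch() above (queryset arguments are illustrative);
# the 15-tuple is unpacked in the order it is returned.
def _crossmatch_report_sketch(srcs1, srcs2):
    (n_match, n1, n2, n3, n4, matches,
     n_missed1, missed1, n_missed2, missed2,
     n_match_rev, r1, r2, r3, r4) = crossmatch(srcs1, srcs2)
    print("forward matches: {}, reverse matches: {}".format(n_match, n_match_rev))
    print("unmatched: {} in srcs1, {} in srcs2".format(n_missed1, n_missed2))
    for m in matches:
        print("{} {} <-> {} sep={:.1f} arcsec flux ratio={:.2f}".format(
            m['dir'], m['src1'], m['src2'], m['sep'], m['f1f2_ratio']))

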
def normalize(array):
    """
    Normalize a 4 element array/list/numpy.array for use as a quaternion

    :param array: 4 element list/array
    :returns: normalized array
    :rtype: numpy array
    """
    quat = np.array(array)
    return quat / np.sqrt(np.dot(quat, quat))


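# Quick self-check sketch for normalize(): any constant 4-vector maps to a
# unit quaternion with equal components.
def _normalize_example():
    q = normalize([1.0, 1.0, 1.0, 1.0])
    assert np.allclose(q, [0.5, 0.5, 0.5, 0.5])
    return q

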
def load_gyro_file(filename):
    """ Loads gyro (attitude) data from a data dump file

    The format of the data dump directory is srg_20191228_143827_000

    Parameters
    ----------
    filename : str
        Absolute path to the file

    :Author:
        Roman Krivonos <krivonos@cosmos.ru>
    """

    if not os.path.isfile(filename):
        print("File {} does not exist".format(filename))
        return

    logging.debug("Loading gyro from {}".format(filename))
    print("Loading {}".format(filename))

    name = os.path.basename(filename)
    fits_name = os.path.splitext(name)[0]
    """ without .gz """
    dump_name = os.path.splitext(fits_name)[0]
    dump_name = dump_name[:19]

    """ Check whether this data dump exists """
    try:
        dump = SrgDataDump.objects.get(name=dump_name)
        if(dump.gyro_loaded):
            logging.debug("Gyro for {} is loaded, skip".format(dump))
            print("Gyro for {} is loaded, skip".format(dump))
            return
        else:
            logging.debug("Clear gyro for {}".format(dump))
            gyros = dump.gyro_set.all()
            gyros.delete()
    except SrgDataDump.DoesNotExist:
        logging.debug("Create new data dump {}".format(dump_name))
        format_string = 'srg_%Y%m%d_%H%M%S'
        dtime = datetime.strptime(dump_name, format_string)
        tm = Time(dtime, format='datetime', scale='utc')
        dt = tm.to_datetime(timezone=TZ_MSK)
        dump = SrgDataDump(name=dump_name, date=dt)
        dump.save()

    logging.debug("Read gyro from {} and attach it to {}".format(filename, dump))
    print("Read gyro from {} and attach it to {}".format(filename, dump))
    hp = HEALPix(nside=NSIDE_SOURCES, order=ORDER, frame=FK5())

    ext = 1
    hdul = fits.open(filename)  # open a FITS file
    hdr = hdul[ext].header
    data = hdul[ext].data

    for item in data:
        q1 = float(item['QORT_0'])
        q2 = float(item['QORT_1'])
        q3 = float(item['QORT_2'])
        q4 = float(item['QORT_3'])
        obt = float(item['TIME'])
        # reorder QORT_[1,2,3,0] so the scalar component comes last,
        # matching Quat's [x, y, z, w] attitude convention
        quat = Quat(attitude=normalize([q2, q3, q4, q1]))

        ra = quat.ra
        dec = quat.dec
        crd = SkyCoord(ra, dec, frame=FK5(), unit="deg")
        healpix = hp.skycoord_to_healpix(crd)
        gyro = Gyro(dump=dump, healpix=healpix, ra=ra, dec=dec, obt=obt,
                    mjd=Time(mission2date(obt), format='datetime').mjd)
        gyro.save()

    dump.gyro_loaded = True
    dump.save()
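

# Usage sketch (illustrative): load every gyro dump found in a directory,
# oldest first, mirroring load_skymap_sources_dir() above; the file-name
# pattern is an assumption based on the dump-naming convention.
def _load_gyro_dir_sketch(dir):
    files = glob.glob("{}/srg_????????_??????_000.fits.gz".format(dir))
    files.sort(key=os.path.getmtime)
    for filename in files:
        load_gyro_file(filename)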