Removed the Survey model; survey is now a plain integer field on the Pixel model and all code has been updated for the change. Added a set_contaminated management command that sets the per-pixel source contamination flag, and a CatalogSource model for storing the ART-XC source catalog (the table is filled by set_contaminated). The upper-limit view now returns extra fields, including a list of catalog sources and their properties within 120 arcseconds of the requested position.
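For quick reference, the new upper-limit response fields (ApertureCounts, ApertureBackgroundCounts, SourceCounts, Exposure, SourceRate, BackgroundRate, Contamination, NearbySources) have roughly the shape sketched below; all values are illustrative placeholders, only the key names come from the views.py diff further down.

# Illustrative shape of the new response fields (all values are made up)
extra_fields = {
    'SourceRate'     : 0.0123,   # CR = (N - B) / t / EEF, EEF-corrected source count rate
    'BackgroundRate' : 0.0456,   # BR = B / t, background rate within the aperture
    'Contamination'  : False,    # True if any source/annulus pixel is flagged contaminated
    'NearbySources'  : [         # ART-XC catalog sources within 120 arcsec of the position
        {
            'srcid': 1, 'name': 'example source', 'ra_deg': 10.0, 'dec_deg': -5.0,
            'pos_error': 5.0, 'significance': 12.3, 'flux': 1.2e-12, 'flux_error': 3.0e-13,
            'catalog_name': 'example catalog', 'new_xray': False, 'source_type': '',
        },
    ],
}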
commit bfaf103729 (parent ffaa663fdd)
catalog.dat: 1545 lines, new file (diff suppressed because it is too large)
@@ -5,7 +5,7 @@ from astropy.io import fits
 from django.core.management.base import BaseCommand
 from django.db import transaction
-from uplim.models import Pixel, Survey
+from uplim.models import Pixel#, Survey
 from django.db.models import Max

 from itertools import islice
@@ -109,32 +109,34 @@ class Command(BaseCommand):
         # CREATE THE SURVEY IF IT DOES NOT EXIST
         # **************************************************************

-        with transaction.atomic():
+        # with transaction.atomic():

-            survey,created = Survey.objects.get_or_create(number=survey_number)
+        # survey,created = Survey.objects.get_or_create(number=survey_number)

-            if created:
-                self.stdout.write(f"Created a new survey instance with number: {survey.number}")
-            else:
-                self.stdout.write(f"Using existing survey instance with the number: {survey.number}")
+        # if created:
+        #     self.stdout.write(f"Created a new survey instance with number: {survey.number}")
+        # else:
+        #     self.stdout.write(f"Using existing survey instance with the number: {survey.number}")

         # FETCH THE LAST PROCESSED HPID AND CONTINUE FROM IT
         # **************************************************************

         last_hpid = (
             Pixel.objects
-            .filter(survey=survey)
+            .filter(survey=survey_number)
             .aggregate(max_hpid=Max('hpid'))['max_hpid']
             or -1
         )
         start_index = last_hpid + 1


         pixel_generator = (
             Pixel(
                 hpid=i,
                 counts=int(count),
                 exposure=float(exposure),
-                survey=survey
+                survey=survey_number
             )
             for i, (count, exposure) in enumerate(zip(counts_data, exposure_data))
             if i >= start_index
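The resume logic in this hunk only computes start_index; the loop that drains pixel_generator is outside the visible context. Purely as a sketch (the batch size and the use of bulk_create with ignore_conflicts are assumptions, not shown in this diff), the consuming side could look like:

from itertools import islice
from uplim.models import Pixel

def ingest(pixel_generator, batch_size=10000):   # batch_size is a made-up value
    """Drain the pixel generator in chunks so memory use stays bounded."""
    while True:
        chunk = list(islice(pixel_generator, batch_size))
        if not chunk:
            break
        # ignore_conflicts guards against re-inserting an hpid that already
        # exists for this survey (the unique_hpid_per_survey constraint)
        Pixel.objects.bulk_create(chunk, ignore_conflicts=True)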
@@ -1,17 +1,34 @@
 # uplim/management/commands/set_contaminated.py

 # add custom flux-radius mapping?
 # add specifying the columns?
 # do contamination setting per survey?
 # include nside for surveys?

 from django.core.management.base import BaseCommand
+from django.db import transaction

 import pandas as pd
 import healpy as hp
 import numpy as np
 from astropy.coordinates import SkyCoord

-from uplim.models import Pixel
+from uplim.models import Pixel, CatalogSource
+
+from itertools import islice
+
+from datetime import datetime
+
+BATCH_SIZE=900
+
+def batch(iterable, size):
+    iterable = iter(iterable)
+    while True:
+        chunk = list(islice(iterable, size))
+        if not chunk:
+            break
+        yield chunk


 class Command(BaseCommand):
     help = "Set the 'contaminated' flag for all pixels based on the fluxes in the provided catalog."
@@ -67,7 +84,7 @@ class Command(BaseCommand):
         # READ THE CATALOG FILE USING PANDAS READ_FWF
         # *******************************************

-        # Define column positions based on the byte ranges in your table
+        # Define column positions based on the byte ranges
         colspecs = [
             (0, 4),     # SrcID (1-4)
             (5, 26),    # Name (6-26)
@@ -91,8 +108,49 @@ class Command(BaseCommand):
         # Read the file using the fixed-width format
         catalog = pd.read_fwf(catalog_file, colspecs=colspecs, names=colnames)

+        for col in ['Name', 'CName', 'Type']:
+            catalog[col] = catalog[col].fillna('')
+
         self.stdout.write(str(catalog.head()))

+        # LOAD THE CATALOG INTO THE DATABASE
+        # **********************************
+
+        existing_srcids = set(
+            CatalogSource.objects.values_list('srcid', flat=True)
+        )
+
+        to_create = []
+
+        for _, row in catalog.iterrows():
+
+            srcid = int(row['SrcID'])
+            if srcid in existing_srcids:
+                continue
+            to_create.append(
+                CatalogSource(
+                    srcid        = srcid,
+                    name         = row['Name'].strip(),
+                    ra_deg       = float(row['RAdeg']),
+                    dec_deg      = float(row['DEdeg']),
+                    pos_error    = float(row['ePos']),
+                    significance = float(row['Signi']),
+                    flux         = float(row['Flux']),
+                    flux_error   = float(row['e_Flux']),
+                    catalog_name = row['CName'].strip(),
+                    new_xray     = bool(int(row['NewXray'])),
+                    source_type  = row['Type'].strip()
+                )
+            )
+
+        if to_create:
+            self.stdout.write(f'Inserting {len(to_create)} new catalog rows.')
+            for chunk in batch(to_create, BATCH_SIZE):
+                CatalogSource.objects.bulk_create(chunk, ignore_conflicts=True)
+            self.stdout.write('Catalog update complete.')
+        else:
+            self.stdout.write('All catalog rows already exist in the database.')
+
         # hard coded nside and flux-radius mapping
         # maybe change that
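A note on the read_fwf call above: pandas colspecs are 0-based, half-open [start, stop) intervals, which is why the catalog's 1-based byte range 1-4 (SrcID) becomes (0, 4) and 6-26 (Name) becomes (5, 26). A tiny self-contained check, with an invented sample row:

import io
import pandas as pd

sample = "   1 EXAMPLE SOURCE NAME  \n"   # invented fixed-width row
df = pd.read_fwf(io.StringIO(sample),
                 colspecs=[(0, 4), (5, 26)],
                 names=['SrcID', 'Name'])
print(df.loc[0, 'SrcID'])   # 1
print(df.loc[0, 'Name'])    # EXAMPLE SOURCE NAME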
@@ -168,9 +226,24 @@ class Command(BaseCommand):

         self.stdout.write("\nList ready, updating the database...")

-        if masked_pixels_list:
-            Pixel.objects.filter(hpid__in=masked_pixels_list).update(contaminated=True)
-            self.stdout.write(f"\nMarked {len(masked_pixels_list)} pixels as contaminated.")
-        else:
-            self.stdout.write("No pixels marked as contaminated, exiting.")
+        if not masked_pixels_list:
+            self.stdout.write("No pixels marked as contaminated, exiting.")
+            return
+
+        total = len(masked_pixels_list)
+        updated = 0
+        self.stdout.write(f'\nUpdating contaminated flag in batches of {BATCH_SIZE}')
+
+        for chunk in batch(masked_pixels_list, BATCH_SIZE):
+            with transaction.atomic():
+                Pixel.objects.filter(hpid__in=chunk).update(contaminated=True)
+
+            updated += len(chunk)
+            percentage = updated / total * 100
+
+            timestamp = datetime.now().strftime("%H:%M:%S")
+            self.stdout.write(f'[{timestamp}] {updated}/{total} ({percentage:.1f}%) updated')
+
+        self.stdout.write(f'\n Marked {updated} pixels as contaminated.')
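The update loop above works through masked_pixels_list in BATCH_SIZE chunks, one transaction per chunk, so each hpid__in list stays short and a failure only rolls back the current chunk; 900 presumably keeps each query under common parameter limits such as SQLite's default of 999 bound variables, though that motivation is an assumption, not stated in the code. The same pattern reduced to a standalone helper:

from django.db import transaction
from uplim.models import Pixel

def flag_contaminated(hpids, batch_size=900):
    """Set contaminated=True for the given healpix ids in small chunks."""
    updated = 0
    for start in range(0, len(hpids), batch_size):
        chunk = hpids[start:start + batch_size]
        with transaction.atomic():
            # QuerySet.update returns the number of rows actually changed
            updated += Pixel.objects.filter(hpid__in=chunk).update(contaminated=True)
    return updated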
models.py (65 changed lines)
@@ -6,40 +6,49 @@ from django.db.models import UniqueConstraint


-class Survey(models.Model):
-
-    number = models.IntegerField(unique=True)
-
-    def __str__(self):
-        return f"Survey {self.number}"
-
-
 class Pixel(models.Model):

-    #id = models.AutoField(primary_key=True) # ~2 million pixels for a 4096 survey
+    #id = models.AutoField(primary_key=True) # ~200 million pixels for a 4096 survey
+    # no need to set explicitly
+    # WILL ONLY HOLD 10 SURVEYS AS AN AUTOFIELD (IntegerField, ~2 billion limit)
+    # BIGAUTOFIELD WILL BE REQUIRED FOR MORE!

-    survey = models.ForeignKey(
-        Survey,
-        on_delete=models.CASCADE,
-        related_name='pixels',
-        default=0
-    )
+    survey = models.SmallIntegerField()

     hpid = models.IntegerField(db_index=True)   # up to over 200 million
     counts = models.IntegerField()              # f4, up to ~44k integer: 2 byte too small
     exposure = models.FloatField()              # f4, up to ~13300 float

     contaminated = models.BooleanField(default=False)

     class Meta:
         constraints = [
             UniqueConstraint(fields=['survey','hpid'], name='unique_hpid_per_survey'),
         ]

     def __str__(self):
-        return f"Pixel {self.id} (Survey {self.survey.number})"
+        return f"Pixel {self.id} hpid {self.hpid} (Survey {self.survey})"


+class CatalogSource(models.Model):
+
+    srcid = models.SmallIntegerField(primary_key=True)
+    name = models.CharField(max_length=21)
+    ra_deg = models.FloatField()
+    dec_deg = models.FloatField()
+    pos_error = models.FloatField()
+    significance = models.FloatField()
+    flux = models.FloatField()
+    flux_error = models.FloatField()
+    catalog_name = models.CharField(max_length=28)
+    new_xray = models.BooleanField(default=False)
+    source_type = models.CharField(max_length=13)
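With the Survey model gone, selections that previously traversed the foreign key (survey__number__in=...) now filter the integer column directly, which is exactly the survey__in change visible in views.py below. For example (survey numbers and hpid are placeholders):

from uplim.models import Pixel

survey_numbers = [1, 2, 3]   # placeholder selection
pixels = Pixel.objects.filter(hpid=123456, survey__in=survey_numbers)

# the ('survey', 'hpid') unique constraint means a single pair can be fetched directly
# (raises Pixel.DoesNotExist if that pair is absent)
pixel = Pixel.objects.get(survey=1, hpid=123456)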
views.py (140 changed lines)
@@ -4,7 +4,7 @@
 # search for pixels non-inclusively
 import healpy as hp
 import astropy.units as u
-from astropy.coordinates import SkyCoord
+from astropy.coordinates import SkyCoord, Angle
 import numpy as np

 import scipy.special as sp
@@ -17,7 +17,7 @@ from rest_framework import status

 from django.shortcuts import get_object_or_404

-from uplim.models import Pixel
+from uplim.models import Pixel, CatalogSource

 # SANITIZE RESPONSE DATA BEFORE JSON CONVERSION FOR DEBUGGING NANS
 # now NaNs are converted to 'null' beforehand
@@ -54,6 +54,7 @@ def parse_survey_param(raw):


 # PIXEL VIEW (MOSTLY FOR TESTING)
+# add healpix indices into the output
 # **************************************************************

 class PixelAggregateView(APIView):
@@ -94,43 +95,37 @@
         # **************************************************************
         qs = Pixel.objects.filter(
             hpid=hpid,
-            survey__number__in=survey_numbers
+            survey__in=survey_numbers
         )

         if not qs.exists():
             # no matching pixel(s) → 404
-            get_object_or_404(Pixel, hpid=hpid, survey__number__in=survey_numbers)
+            get_object_or_404(
+                Pixel,
+                hpid=hpid,
+                survey__in=survey_numbers
+            )

-        result = qs.aggregate(
-            #pixel_hpid=hpid,
-            #survey_number=survey_numbers,
+        aggregates = qs.aggregate(
             total_counts=Sum("counts"),
             total_exposure=Sum("exposure")
         )

+        plusdata = {
+            'pixel_hpid' : hpid,
+            'surveys' : survey_numbers
+        }
+
+        result = {**aggregates, **plusdata}
+
         # RETURN THE SUMS
         # **************************************************************
         return Response(result, status=status.HTTP_200_OK)



-# class PixelDetailView(APIView):
-#     """
-#     API endpoint that returns the pixel data (counts, exposure, rate)
-#     for a given hpid.
-#     """
-#     def get(self, request, hpid):
-#         # Get the survey using the survey_number field.
-#         # survey = get_object_or_404(Survey, number=survey_number)
-
-#         # Get the pixel corresponding to the survey and hpid.
-#         pixel = get_object_or_404(Pixel, hpid=hpid)
-
-#         # Serialize the pixel data to JSON.
-#         serializer = PixelSerializer(pixel)
-#         return Response(serializer.data, status=status.HTTP_200_OK)



 # UPPER LIMIT COMPUTATION VIEW
 # **************************************************************
@@ -225,17 +220,24 @@ class UpperLimitView(APIView):
             item for item in outer_pixel_list if item not in inner_pixel_list
         ]


         source_pixels = Pixel.objects.filter(
             hpid__in = source_pixel_list,
-            survey__number__in = survey_numbers
+            survey__in = survey_numbers
         )

         annulus_pixels = Pixel.objects.filter(
             hpid__in = annulus_pixel_list,
-            survey__number__in = survey_numbers
+            survey__in = survey_numbers
         )

+        # check contamination
+        contamination = (
+            source_pixels.filter(contaminated=True).exists() or
+            annulus_pixels.filter(contaminated=True).exists()
+        )
+
         if not source_pixels.exists() and not annulus_pixels.exists():
             return Response(
                 {"detail": "No pixel data for the given survey selection."},
@@ -311,43 +313,87 @@

         S = N - B    # counts as simply counts within aperture
                      # with the background estimate subtracted

-        CR = S / t / EEF    # count rate
+        CR = S / t / EEF    # source count rate
+
+        BR = B / t          # background rate within aperture

         FL = CR * ECF       # conversion to flux

         Flux = max(FL, 0)   # flux cannot be lower than zero

-        # RESULT ASSEMBLY
+        # NEARBY SOURCES CHECK
         # ****************************************************************

+        radius_as = 120
+        radius_deg = radius_as / 3600
+
+        dec_min = max(dec - radius_deg, -90)
+        dec_max = min(dec + radius_deg, 90)
+
+        # cheap belt query
+        belt_sources = CatalogSource.objects.filter(
+            dec_deg__gte = dec_min,
+            dec_deg__lte = dec_max
+        )
+
+        center_coord = SkyCoord(ra, dec, unit='deg')
+
+        nearby_sources = []
+
+        # refine belt to circular region using astropy separation
+        for catsrc in belt_sources:
+            catsrc_coord = SkyCoord(catsrc.ra_deg, catsrc.dec_deg, unit='deg')
+            if center_coord.separation(catsrc_coord).deg <= radius_deg:
+                nearby_sources.append(
+                    {
+                        'srcid'        : catsrc.srcid,
+                        'name'         : catsrc.name,
+                        'ra_deg'       : catsrc.ra_deg,
+                        'dec_deg'      : catsrc.dec_deg,
+                        'pos_error'    : catsrc.pos_error,
+                        'significance' : catsrc.significance,
+                        'flux'         : catsrc.flux,
+                        'flux_error'   : catsrc.flux_error,
+                        'catalog_name' : catsrc.catalog_name,
+                        'new_xray'     : catsrc.new_xray,
+                        'source_type'  : catsrc.source_type
+                    }
+                )
+
+        # RESULT JSON
+        # ****************************************************************

         result = {

             'ClassicUpperLimit'           : classic_count_ul,
             'ClassicLowerLimit'           : classic_count_ll,
             'ClassicCountRateUpperLimit'  : classic_rate_ul,
             'ClassicCountRateLowerLimit'  : classic_rate_ll,
             'ClassicFluxUpperLimit'       : classic_flux_ul,
             'ClassicFluxLowerLimit'       : classic_flux_ll,

             'BayesianUpperLimit'          : bayesian_count_ul,
             'BayesianLowerLimit'          : bayesian_count_ll,
             'BayesianCountRateUpperLimit' : bayesian_rate_ul,
             'BayesianCountRateLowerLimit' : bayesian_rate_ll,
             'BayesianFluxUpperLimit'      : bayesian_flux_ul,
             'BayesianFluxLowerLimit'      : bayesian_flux_ll,

             'FluxEstimate'                : Flux,

-            # 'N'       : N,
-            # 'Nnpix'   : Nnpix,
-            # 'Bcounts' : Bcounts,
-            # 'Bnpix'   : Bnpix,
-            # 'B'       : B,
-            # 'tsum'    : tsum,
-            # 't'       : t
+            # raw counts and exposure omitted from the response
+            'ApertureCounts'              : N,
+            'ApertureBackgroundCounts'    : B,
+            'SourceCounts'                : S,
+            'Exposure'                    : t,
+
+            'SourceRate'                  : CR,
+            'BackgroundRate'              : BR,
+
+            'Contamination'               : contamination,
+            'NearbySources'               : nearby_sources

         }
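The nearby-source search above runs a cheap declination-belt query in SQL and then applies the exact 120-arcsec cone test with astropy separations in Python; the belt is a superset of the cone, so no sources are lost by the prefilter. Should the per-row SkyCoord construction ever become a bottleneck, the refinement could be done in one vectorised call; a sketch under that assumption (the helper name is hypothetical):

import astropy.units as u
from astropy.coordinates import SkyCoord

def refine_to_cone(center_ra, center_dec, belt_sources, radius_arcsec=120):
    """Keep only belt sources within radius_arcsec of the center, in one vectorised call."""
    sources = list(belt_sources)
    if not sources:
        return []
    center = SkyCoord(center_ra * u.deg, center_dec * u.deg)
    coords = SkyCoord([s.ra_deg for s in sources] * u.deg,
                      [s.dec_deg for s in sources] * u.deg)
    keep = center.separation(coords) <= radius_arcsec * u.arcsec
    return [s for s, ok in zip(sources, keep) if ok]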