Compare commits

...

29 Commits

SHA1 Message Date
a2c528644c Added normalized background rate output in units ct/s/keV/arcmin2 2025-07-16 16:57:36 +03:00
3f4ca4075f increased the size of background region 2025-06-05 13:53:20 +03:00
c62826c8b8 reverted excluding contaminated pixels from the source region 2025-06-05 13:18:03 +03:00
f680039e9a changed the contamination handling logic to also exclude contaminated pixels from the source region 2025-06-05 13:07:33 +03:00
d346bbadbc added Virgo and Coma clusters to the mask with the radius 2.5 degrees; sorting nearby sources by angular separation 2025-05-28 10:47:30 +03:00
709918571f increased nearby source search radius to 3 degrees 2025-05-27 12:49:05 +03:00
90b938a934 removed contamination array from the map dict 2025-05-27 12:42:02 +03:00
c144285f88 switched to arc seconds for separation and mask_radius fields 2025-05-27 11:19:17 +03:00
85a96b5fb2 fixed variable assignment error 2025-05-27 11:07:38 +03:00
ebf8e11866 added default value of nearby source search if mr=0 2025-05-27 11:00:53 +03:00
e2d0afa4c5 added separation to nearby sources, set nearby source search radius to 2x map radius 2025-05-27 10:57:23 +03:00
e01941f06b added mask_radius_deg field to nearby source dict 2025-05-27 10:40:36 +03:00
42c63547b1 changed from using Max aggregation to a separate query for the contamination status 2025-05-26 17:28:40 +03:00
a36163f731 Added status, error message fields, contamination map to response 2025-05-26 15:26:15 +03:00
cc410f58dc fix 2025-05-24 14:35:50 +03:00
ada05dd34b background pixels with contaminated=True are removed from background calculation 2025-05-24 14:34:21 +03:00
fdc26bc4c4 correct exposure handling across multiple surveys 2025-05-23 18:31:33 +03:00
5e0ecc9cc9 add one-sided and double-sided interval distinction 2025-05-23 12:19:03 +03:00
03cdfa01b0 make image map radius a parameter 2025-05-23 10:54:11 +03:00
3c5aed6a41 fix bayesian limits being lists in the json output 2025-05-22 17:22:52 +03:00
26f848d274 code formatting 2025-05-19 15:09:04 +03:00
cf3213a0f9 Added image healpix map into the json response of UpperLimitView; altered the Survey field into a PositiveSmallIntegerField 2025-05-19 13:48:27 +03:00
6154679dd2 readme update 2025-05-18 15:38:25 +03:00
982e5e8680 update 2025-05-18 15:36:56 +03:00
a8cf963dee update 2025-05-18 15:30:28 +03:00
bfaf103729 Removed Survey model, it is now a field in the Pixel model; all code rewritten for the change; added set_contaminated management command to set the source contamination flag; added CatalogSource model for the art-xc source catalog storage, the table is filled by the set_contaminated management script; the upper limit view now outputs extra fields, including a list of sources and their properties within 120 arcseconds 2025-05-18 15:26:20 +03:00
ffaa663fdd upper limit NaN fix; ECF & EEF values update 2025-05-13 15:38:03 +03:00
d46e5e5fa6 Merge branch 'main' of http://heagit.cosmos.ru/tyrin/axc_ul 2025-05-08 16:33:10 +03:00
f0b505330f --batch_size no longer required, defaults to 1000 2025-05-08 16:31:58 +03:00
9 changed files with 2404 additions and 315 deletions


@@ -1,8 +1,5 @@
# uplim
test
ART-XC upper limits django application
## Management commands
@@ -13,13 +10,16 @@ Allows to populate the database with data for a survey in healpix format. Takes
Syntax: python manage.py load_survey --counts='*path_to_counts_map*' --exposure='*path_to_exposure_map*' --survey_number=*integer survey number* --batch_size=*number of pixels to insert in one transaction*
- set_contaminated
Loads the catalog.dat source catalog into the database (or updates it) and sets the contamination flag on pixels based on the catalogued sources. Includes a --reset option that resets the flag to False on all pixels.
Syntax: python manage.py set_contaminated --catalog='*path to catalog.dat*'
## Models (models.py)
- Survey:
- survey number
- Pixel:
- survey foreign key
- survey number
- hpid (healpix pixel id)
- counts
- exposure
@@ -27,23 +27,23 @@ Syntax: python manage.py load_survey --counts='*path_to_counts_map*' --exposure=
## Views (views.py)
**Before public deployment, views that provide direct pixel data should be altered so that pixel data is not included in the response JSON.**
- PixelAggregateView:
For testing, outputs the aggregated data across specified surveys for a healpix pixel.
Returns:
Sum of counts and exposure over specified surveys for the specified pixel.
URL: /api/pixel-aggregate/?pixel=123&survey=1
/api/pixel-aggregate/?pixel=123&survey=1
(/api/pixel-aggregate/?pixel=123&survey=1,3,5)
(/api/pixel-aggregate/?pixel=123&survey=1-5)
- UpperLimitView:
The view providing upper limit calculation.
Returns:
Upper and lower limits in units of counts, count rate, and physical flux for Bayesian and classic approaches, as well as a count rate estimate and aperture photometry data (counts in the aperture, background estimate, etc).
Upper and lower limits in units of counts, count rate, and physical flux for Bayesian and classic approaches, as well as a count rate estimate and aperture photometry data (counts in the aperture, background estimate, etc). Also whether or not the apertures contain any pixels marked as contaminated, as well as a list of nearby sources.
URL: /api/upper-limit/?ra=10.0&dec=10.0&cl=.95&survey=1 (or list 1,3,5 or hyphenated range 1-5).
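For orientation, a minimal client sketch of the two endpoints described above (the host `http://localhost:8000` and the `requests` dependency are assumptions for the example, not part of the repo):

```python
import requests

BASE = "http://localhost:8000/api"  # hypothetical dev-server host

# Aggregate counts and exposure for healpix pixel 123 over surveys 1-5
agg = requests.get(f"{BASE}/pixel-aggregate/", params={"pixel": 123, "survey": "1-5"}).json()
print(agg["total_counts"], agg["total_exposure"])

# 95% confidence limits at RA=10, Dec=10 using surveys 1, 3 and 5
ul = requests.get(
    f"{BASE}/upper-limit/",
    params={"ra": 10.0, "dec": 10.0, "cl": 0.95, "survey": "1,3,5"},
).json()
print(ul["BayesianUpperLimit"], ul["FluxEstimate"], ul["Contamination"])
```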

catalog.dat (new file, 1545 lines): file diff suppressed because it is too large.


@@ -1,22 +1,25 @@
# uplim/management/commands/load_survey.py
import numpy as np
from astropy.io import fits
from itertools import islice
from datetime import datetime
from django.core.management.base import BaseCommand
from django.db import transaction
from uplim.models import Pixel, Survey
from uplim.models import Pixel
from django.db.models import Max
from itertools import islice
from datetime import datetime
from tqdm import tqdm
import sys
# DEFINE BATCH SIZE AND BATCH
# **************************************************************
# BATCH_SIZE = 1000000
def batch(iterable, size):
"""
Generator that yields successive chunks of size 'size' from 'iterable'.
@@ -29,8 +32,6 @@ def batch(iterable, size):
yield chunk
class Command(BaseCommand):
help = "Process FITS files and store the data in the database"
@@ -39,40 +40,33 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--counts',
type=str,
required=True,
help='Path of the counts file'
"--counts", type=str, required=True, help="Path of the counts file"
)
parser.add_argument(
'--exposure',
type=str,
required=True,
help='Path of the exposure file'
"--exposure", type=str, required=True, help="Path of the exposure file"
)
parser.add_argument(
'--survey_number',
"--survey_number",
type=int,
required=True,
help='Integer ID of the survey being read'
help="Integer ID of the survey being read",
)
parser.add_argument(
'--batch_size',
"--batch_size",
type=int,
required=True,
help='Integer number of pixels to be inserted into the database at once'
default=1000,
help="Integer number of pixels to be inserted into the database at once",
)
def handle(self, *args, **options):
# GET FILENAMES FROM ARGUMENTS
# **************************************************************
counts_file = options['counts']
exposure_file = options['exposure']
survey_number = options['survey_number']
BATCH_SIZE = options['batch_size']
counts_file = options["counts"]
exposure_file = options["exposure"]
survey_number = options["survey_number"]
BATCH_SIZE = options["batch_size"]
self.stdout.write(f"\nCounts file:\t{counts_file}")
self.stdout.write(f"Exposure file:\t{exposure_file}")
@@ -87,7 +81,6 @@ class Command(BaseCommand):
counts_data = counts_map.ravel()
with fits.open(exposure_file) as hdul:
column_name = "T"
@@ -104,53 +97,67 @@ class Command(BaseCommand):
total_pixels = counts_data.shape[0]
self.stdout.write(f"\nTotal pixels to insert:\t{total_pixels}")
assert counts_data.shape == exposure_data.shape, "Counts and exposure maps must have the same shape"
assert (
counts_data.shape == exposure_data.shape
), "Counts and exposure maps must have the same shape"
# CREATE THE SURVEY IF IT DOES NOT EXIST
# **************************************************************
with transaction.atomic():
# with transaction.atomic():
survey,created = Survey.objects.get_or_create(number=survey_number)
# survey,created = Survey.objects.get_or_create(number=survey_number)
if created:
self.stdout.write(f"Created a new survey instance with number: {survey.number}")
else:
self.stdout.write(f"Using existing survey instance with the number: {survey.number}")
# if created:
# self.stdout.write(f"Created a new survey instance with number: {survey.number}")
# else:
# self.stdout.write(f"Using existing survey instance with the number: {survey.number}")
# FETCH THE LAST PROCESSED HPID AND CONTINUE FROM IT
# **************************************************************
last_hpid = (
Pixel.objects
.filter(survey=survey)
.aggregate(max_hpid=Max('hpid'))['max_hpid']
Pixel.objects.filter(survey=survey_number).aggregate(max_hpid=Max("hpid"))[
"max_hpid"
]
or -1
)
start_index = last_hpid + 1
pixels_to_insert = total_pixels - start_index
if pixels_to_insert <= 0:
self.stdout.write("All pixels have already been inserted. Exiting.")
pixel_generator = (
Pixel(
hpid=i,
counts=int(count),
exposure=float(exposure),
survey=survey
survey=survey_number,
)
for i, (count, exposure) in enumerate(zip(counts_data, exposure_data))
if i >= start_index
)
total_inserted = start_index
pbar = tqdm(
total=pixels_to_insert,
unit="pix",
desc=f"Survey {survey_number}",
# file=sys.stdout,
)
# Process in batches
for pixel_batch in batch(pixel_generator, BATCH_SIZE):
with transaction.atomic():
Pixel.objects.bulk_create(pixel_batch)
total_inserted += len(pixel_batch)
percentage = total_inserted / total_pixels * 100
timestamp = datetime.now().strftime("%H:%M:%S")
self.stdout.write(
f"[{timestamp}] {percentage:.2f}% inserted"
)
# total_inserted += len(pixel_batch)
# percentage = total_inserted / total_pixels * 100
# timestamp = datetime.now().strftime("%H:%M:%S")
# self.stdout.write(f"[{timestamp}] {percentage:.2f}% inserted")
pbar.update(BATCH_SIZE)
self.stdout.write(f"Inserted a total of {total_inserted} pixels.")
pbar.close()
self.stdout.write(f"Done: Inserted a total of {total_inserted} pixels.")


@@ -0,0 +1,307 @@
# uplim/management/commands/set_contaminated.py
# add custom flux-radius mapping?
# add specifying the columns?
# do contamination setting per survey?
# include nside for surveys?
from django.core.management.base import BaseCommand
from django.db import transaction
from uplim.models import Pixel, CatalogSource
import pandas as pd
import healpy as hp
import numpy as np
from astropy.coordinates import SkyCoord
from itertools import islice
from datetime import datetime
BATCH_SIZE = 900
def batch(iterable, size):
iterable = iter(iterable)
while True:
chunk = list(islice(iterable, size))
if not chunk:
break
yield chunk
class Command(BaseCommand):
help = "Set the 'contaminated' flag for all pixels based on the fluxes in the provided catalog."
# COMMAND LINE ARGUMENTS
# **********************
def add_arguments(self, parser):
parser.add_argument(
"--catalog", type=str, required=False, help="Path to the catalog.dat file"
)
# parser.add_argument(
# '--survey',
# type=int,
# required=False,
# help='integer number of the survey to set the flag for'
# )
parser.add_argument(
"--reset",
action="store_true",
default=False,
help="Reset the contamination flag across all pixels back to False.",
)
def handle(self, *args, **options):
# RESET BEHAVIOR: SET CONTAMINATION FLAG TO FALSE FOR ALL PIXELS
# **************************************************************
if options["reset"]:
self.stdout.write("Resetting the contamination flag...")
Pixel.objects.update(contaminated=False)
self.stdout.write("Done")
return
if not options["catalog"]:
self.stdout.write("No catalog file provided, exiting")
return
catalog_file = options["catalog"]
self.stdout.write(f"Catalog file:\t{catalog_file}")
# READ THE CATALOG FILE USING PANDAS READ_FWF
# *******************************************
# Define column positions based on the byte ranges
colspecs = [
(0, 4), # SrcID (1-4)
(5, 26), # Name (6-26)
(27, 37), # RAdeg (28-37)
(38, 48), # DEdeg (39-48)
(49, 55), # ePos (50-55)
(56, 63), # Signi (57-63)
(64, 76), # Flux (65-76)
(77, 89), # e_Flux (78-89)
(90, 118), # CName (91-118)
(119, 120), # NewXray (120)
(121, 134), # Type (122-134)
]
# Define column names
colnames = [
"SrcID",
"Name",
"RAdeg",
"DEdeg",
"ePos",
"Signi",
"Flux",
"e_Flux",
"CName",
"NewXray",
"Type",
]
# Read the file using the fixed-width format
catalog = pd.read_fwf(catalog_file, colspecs=colspecs, names=colnames)
for col in ["Name", "CName", "Type"]:
catalog[col] = catalog[col].fillna("")
self.stdout.write(str(catalog.head()))
# LOAD THE CATALOG INTO THE DATABASE
# **********************************
existing_srcids = set(CatalogSource.objects.values_list("srcid", flat=True))
to_create = []
for _, row in catalog.iterrows():
srcid = int(row["SrcID"])
if srcid in existing_srcids:
continue
to_create.append(
CatalogSource(
srcid=srcid,
name=row["Name"].strip(),
ra_deg=float(row["RAdeg"]),
dec_deg=float(row["DEdeg"]),
pos_error=float(row["ePos"]),
significance=float(row["Signi"]),
flux=float(row["Flux"]),
flux_error=float(row["e_Flux"]),
catalog_name=row["CName"].strip(),
new_xray=bool(int(row["NewXray"])),
source_type=row["Type"].strip(),
)
)
if to_create:
self.stdout.write(f"Inserting {len(to_create)} new catalog rows.")
for chunk in batch(to_create, BATCH_SIZE):
CatalogSource.objects.bulk_create(chunk, ignore_conflicts=True)
self.stdout.write("Catalog update complete.")
else:
self.stdout.write("All catalog rows already exist in the database.")
# hard coded nside and flux-radius mapping
# maybe change
nside = 4096
npix = hp.nside2npix(nside)
# flux_bins = [0, 125, 250, 2000, 20000, np.inf] # define bin edges
# mask_radii_deg = [
# 0.06,
# 0.15,
# 0.5,
# 0.9,
# 2.5,
# ] # corresponding mask radii in degrees
# # Convert mask radii from degrees to radians (required by query_disc)
# mask_radii = [np.radians(r) for r in mask_radii_deg]
# # Use pandas.cut to assign each source a bin index (0, 1, or 2)
# catalog["flux_bin"] = pd.cut(catalog["Flux"], bins=flux_bins, labels=False)
flux_bins = [0, 125, 250, 2000, 20000, np.inf]
catalog["flux_bin"] = pd.cut(catalog["Flux"], bins=flux_bins, labels=False)
bin_to_radius_deg = {
0: 0.06,
1: 0.15,
2: 0.5,
3: 0.9,
4: 2.5,
}
catalog["mask_radius_deg"] = catalog["flux_bin"].map(bin_to_radius_deg)
# manually add and change some sources
# manual_additions = pd.DataFrame(
# [
# {"RAdeg": 279.9804336, "DEdeg": 5.0669542, "flux_bin": 3},
# {"RAdeg": 266.5173685, "DEdeg": -29.1252321, "flux_bin": 3},
# {
# "RAdeg": 194.9350000,
# "DEdeg": 27.9124722,
# "flux_bin": 4,
# }, # Coma Cluster, 2.5 degrees
# {
# "RAdeg": 187.6991667,
# "DEdeg": 12.3852778,
# "flux_bin": 4,
# }, # Virgo cluster, 2.5 degrees
# ]
# )
# catalog = pd.concat([catalog, manual_additions], ignore_index=True)
# catalog.loc[catalog["SrcID"] == 1101, "flux_bin"] = 2
manual_additions = pd.DataFrame(
[
{
"RAdeg": 279.9804336,
"DEdeg": 5.0669542,
"mask_radius_deg": 0.9,
},
{"RAdeg": 266.5173685, "DEdeg": -29.1252321, "mask_radius_deg": 0.9},
{
"RAdeg": 194.9350000,
"DEdeg": 27.9124722,
"mask_radius_deg": 1.17,
}, # Coma, 70 arcmin radius
{
"RAdeg": 187.6991667,
"DEdeg": 12.3852778,
"mask_radius_deg": 2.5,
}, # Virgo, 2.5 deg radius
]
)
catalog = pd.concat([catalog, manual_additions], ignore_index=True)
catalog.loc[catalog["SrcID"] == 1101, "mask_radius_deg"] = 0.5 # e.g. override
mask_array = np.ones(npix, dtype=bool)
masked_pixels_set = set()
self.stdout.write("\nCreating a list of contaminated pixels...")
# process each source in the catalog
for _, row in catalog.iterrows():
ra = row["RAdeg"]
dec = row["DEdeg"]
src_coord = SkyCoord(ra, dec, unit="deg", frame="icrs")
gal = src_coord.galactic
ra, dec = gal.l.deg, gal.b.deg
# flux_bin = row["flux_bin"] # 0, 1, or 2
# # Get the corresponding mask radius (in radians) for this flux bin
# radius = mask_radii[flux_bin]
# Convert (ra, dec) to HEALPix spherical coordinates
theta = np.radians(90.0 - dec)
phi = np.radians(ra)
vec = hp.ang2vec(theta, phi)
# # Query all pixels within the given radius
# # 'inclusive=True' makes sure pixels on the edge are included
# pix_indices = hp.query_disc(nside, vec, radius, inclusive=True)
# read the explicit per-source radius in degrees, convert to radians:
radius_deg = row["mask_radius_deg"]
radius = np.radians(radius_deg)
# now query:
pix_indices = hp.query_disc(nside, vec, radius, inclusive=True)
# Mark these pixels as bad (False) in our mask
mask_array[pix_indices] = False
# Add the pixel indices to our set of masked pixels
masked_pixels_set.update(pix_indices)
# Convert the set of masked pixels to a sorted list.
masked_pixels_list = sorted(list(masked_pixels_set))
# print("Number of masked pixels:", len(masked_pixels_list))
self.stdout.write("\nList ready, updating the database...")
if not masked_pixels_list:
self.stdout.write("No pixels marked as contaminated, exiting.")
return
total = len(masked_pixels_list)
updated = 0
self.stdout.write(f"\nUpdating contaminated flag in batches of {BATCH_SIZE}")
for chunk in batch(masked_pixels_list, BATCH_SIZE):
with transaction.atomic():
Pixel.objects.filter(hpid__in=chunk).update(contaminated=True)
updated += len(chunk)
percentage = updated / total * 100
timestamp = datetime.now().strftime("%H:%M:%S")
self.stdout.write(
f"[{timestamp}] {updated}/{total} ({percentage:.1f}%) updated"
)
self.stdout.write(f"\n Marked {updated} pixels as contaminated.")


@@ -4,46 +4,47 @@ from django.db import models
from django.db.models import UniqueConstraint
class Survey(models.Model):
number = models.IntegerField(unique=True)
# nside = models.IntegerField(
# default=4096
# )
def __str__(self):
return f"Survey {self.number} of NSIDE {self.nside}"
class Pixel(models.Model):
id = models.AutoField(primary_key=True)
# id = models.AutoField(primary_key=True) # ~200 million pixels for a 4096 survey
# no need to set explicitly
# WILL ONLY HOLD 10 SURVEYS AS AN AUTOFIELD (IntegerField, ~2 billion limit)
# BIGAUTOFIELD WILL BE REQUIRED FOR MORE!
survey = models.ForeignKey(
Survey,
on_delete=models.CASCADE,
related_name='pixels',
default=0
)
survey = models.PositiveSmallIntegerField()
hpid = models.IntegerField(db_index=True)
hpid = models.IntegerField(db_index=True) # up to over 200 million
counts = models.IntegerField()
counts = models.IntegerField() # f4, up to ~44k integer: 2 byte too small
exposure = models.FloatField()
exposure = models.FloatField() # f4, up to ~13300 float
contaminated = models.BooleanField(default=False)
class Meta:
constraints = [
UniqueConstraint(fields=['survey', 'hpid'], name='unique_hpid_per_survey'),
]
def __str__(self):
return f"Pixel {self.id} (Survey {self.survey.number})"
return f"Pixel {self.id} hpid {self.hpid} (Survey {self.survey.number})"
class CatalogSource(models.Model):
srcid = models.SmallIntegerField(primary_key=True)
name = models.CharField(max_length=21)
ra_deg = models.FloatField()
dec_deg = models.FloatField()
pos_error = models.FloatField()
significance = models.FloatField()
flux = models.FloatField()
flux_error = models.FloatField()
catalog_name = models.CharField(max_length=28)
new_xray = models.BooleanField(default=False)
source_type = models.CharField(max_length=13)
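Given this schema, the per-(survey, hpid) uniqueness constraint is what makes cross-survey aggregation safe; a small usage sketch (our example, field names from the models above):

```python
from django.db.models import Sum

from uplim.models import Pixel

# unique_hpid_per_survey guarantees one row per (survey, hpid), so summing
# over a survey selection cannot double count a pixel
totals = Pixel.objects.filter(hpid=123, survey__in=[1, 2, 3]).aggregate(
    total_counts=Sum("counts"), total_exposure=Sum("exposure")
)
```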


@@ -2,5 +2,6 @@ astropy
numpy
healpy
scipy
mpmath
django
djangorestframework


@@ -1,8 +1,3 @@
# uplim/serializers.py
from rest_framework import serializers
from uplim.models import Pixel
# class PixelSerializer(serializers.ModelSerializer):
# class Meta:
# model = Pixel
# fields = ('hpid', 'counts', 'exposure', 'contaminated')


@@ -5,6 +5,6 @@ from .views import PixelAggregateView, UpperLimitView #, PixelDetailView
urlpatterns = [
# path('pixel/<int:hpid>/', PixelDetailView.as_view(), name='pixel-detail'),
path('pixel-aggregate/', PixelAggregateView.as_view(), name='pixel-aggregate'),
path('upper-limit/', UpperLimitView.as_view(), name='upper-limit'),
path("pixel-aggregate/", PixelAggregateView.as_view(), name="pixel-aggregate"),
path("upper-limit/", UpperLimitView.as_view(), name="upper-limit"),
]

views.py (455 changed lines)

@@ -4,20 +4,49 @@
# search for pixels non-inclusively
import healpy as hp
import astropy.units as u
from astropy.coordinates import SkyCoord
import numpy as np
import scipy.special as sp
from astropy.coordinates import SkyCoord, Angle
from astropy.stats import poisson_conf_interval
from django.db.models import Sum
from collections import defaultdict
from django.db.models import (
Max,
Sum,
F,
IntegerField,
BooleanField,
ExpressionWrapper,
Case,
When,
Value,
)
from django.db.models.functions import Cast
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from uplim.models import Pixel, CatalogSource
from django.shortcuts import get_object_or_404
# SANITIZE RESPONSE DATA BEFORE JSON CONVERSION FOR DEBUGGING NANS
# now NaNs are converted to 'null' beforehand
# ****************************************************************
from uplim.models import Pixel
def sanitize(obj):
if isinstance(obj, dict):
return {k: sanitize(v) for k, v in obj.items()}
if isinstance(obj, (list, tuple)):
return [sanitize(v) for v in obj]
# handle numpy scalars
if isinstance(obj, np.generic):
v = obj.item()
return None if (np.isnan(v) or np.isinf(v)) else v
if isinstance(obj, float):
return None if (np.isnan(obj) or np.isinf(obj)) else obj
return obj
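# Usage sketch (our example, not in the file): sanitize() replaces NaN/inf
# anywhere in a nested structure, e.g.
# sanitize({"a": np.nan, "b": [np.float64(1.0), np.inf]}) -> {"a": None, "b": [1.0, None]}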
# SURVEY PARAMETER PARSER
@@ -35,10 +64,11 @@ def parse_survey_param(raw):
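# The function body is unchanged context elided by the diff; an implementation
# consistent with the documented N, N,M and N-M formats might look like:
# surveys = set()
# for part in raw.split(","):
#     if "-" in part:
#         lo, hi = (int(x) for x in part.split("-"))
#         surveys.update(range(lo, hi + 1))
#     else:
#         surveys.add(int(part))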
return sorted(surveys)
# PIXEL VIEW (MOSTLY FOR TESTING)
# add healpix indices into the output
# **************************************************************
class PixelAggregateView(APIView):
def get(self, request):
@@ -52,7 +82,7 @@ class PixelAggregateView(APIView):
if raw_pixel is None or raw_survey is None:
return Response(
{"detail": "Both `pixel` and `survey` parameters are required."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# FILTER THE INPUTS
@@ -62,7 +92,7 @@ class PixelAggregateView(APIView):
except ValueError:
return Response(
{"detail": "`pixel` must be an integer."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
try:
@@ -70,50 +100,33 @@ class PixelAggregateView(APIView):
except ValueError:
return Response(
{"detail": "Malformed `survey`; use N, N,M or N-M formats."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# FILTER AND AGGREGATE
# **************************************************************
qs = Pixel.objects.filter(
hpid=hpid,
survey__number__in=survey_numbers
)
qs = Pixel.objects.filter(hpid=hpid, survey__in=survey_numbers)
if not qs.exists():
# no matching pixel(s) → 404
get_object_or_404(Pixel, hpid=hpid, survey__number__in=survey_numbers)
get_object_or_404(Pixel, hpid=hpid, survey__in=survey_numbers)
result = qs.aggregate(
aggregates = qs.aggregate(
# pixel_hpid=hpid,
# survey_number=survey_numbers,
total_counts=Sum("counts"),
total_exposure=Sum("exposure")
total_exposure=Sum("exposure"),
)
plusdata = {"pixel_hpid": hpid, "surveys": survey_numbers}
result = {**aggregates, **plusdata}
# RETURN THE SUMS
# **************************************************************
return Response(result, status=status.HTTP_200_OK)
# class PixelDetailView(APIView):
# """
# API endpoint that returns the pixel data (counts, exposure, rate)
# for a given hpid.
# """
# def get(self, request, hpid):
# # Get the survey using the survey_number field.
# # survey = get_object_or_404(Survey, number=survey_number)
# # Get the pixel corresponding to the survey and hpid.
# pixel = get_object_or_404(Pixel, hpid=hpid)
# # Serialize the pixel data to JSON.
# serializer = PixelSerializer(pixel)
# return Response(serializer.data, status=status.HTTP_200_OK)
# UPPER LIMIT COMPUTATION VIEW
# **************************************************************
@@ -122,50 +135,37 @@ class UpperLimitView(APIView):
"""
Calculate confidence bounds based on aperture photometry using classic and bayesian methods
"""
def get(self, request):
# GET PARAMETERS FROM THE REQUEST
# **************************************************************
try:
ra = float(request.query_params.get('ra'))
dec = float(request.query_params.get('dec'))
confidence_level = float(request.query_params.get('cl'))
ra = float(request.query_params.get("ra"))
dec = float(request.query_params.get("dec"))
confidence_level = float(request.query_params.get("cl"))
except (TypeError, ValueError):
return Response(
{"error": "Invalud parameters, provide RA, DEC, and CL"},
status = status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# ── NEW: pull & parse survey selection ──
raw_survey = request.query_params.get('survey')
# pull & parse survey selection
raw_survey = request.query_params.get("survey")
if raw_survey is None:
return Response(
{"error": "Missing required `survey` parameter (e.g. ?survey=1,3-5)"},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
try:
survey_numbers = parse_survey_param(raw_survey)
except ValueError:
return Response(
{"error": "Malformed `survey`; use formats like 1, 2-5, or 1,3-4"},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# hp = HEALPix(
# nside = 4096,
# order = 'ring',
# frame = 'icrs'
# )
# CREATE SKYCOORD, CONVERT TO GALACTIC BECAUSE THE HEALPIX
# MAP ITSELF WAS MADE IN GALACTIC COORDINATES
# **************************************************************
src_coord = SkyCoord(
ra, dec, unit = 'deg', frame = 'icrs'
)
gal = src_coord.galactic
src_vec = hp.ang2vec(gal.l.deg, gal.b.deg, lonlat = True)
map_radius_value = request.query_params.get("mr")
# DEFINE APERTURE AND ANNULUS RADII
# **************************************************************
@@ -173,8 +173,16 @@ class UpperLimitView(APIView):
aperture_radius = 71 # radius of the aperture in arc seconds
# HPD ~48 arcseconds
# 90% ~100 arcseconds
annulus_inner = 142 # 2 * aperture_radius
annulus_outer = 284 # 4 * aperture_radius
annulus_inner = 213 # 3 * aperture_radius
annulus_outer = 355 # 5 * aperture_radius
# CREATE SKYCOORD, CONVERT TO GALACTIC BECAUSE THE HEALPIX
# MAP ITSELF WAS MADE IN GALACTIC COORDINATES
# **************************************************************
src_coord = SkyCoord(ra, dec, unit="deg", frame="icrs")
gal = src_coord.galactic
src_vec = hp.ang2vec(gal.l.deg, gal.b.deg, lonlat=True)
# FETCH PIXEL DATA DEFINED VIA HP.QUERY_DISC (INCLUSIVE=FALSE)
# **************************************************************
@@ -184,7 +192,7 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (aperture_radius * u.arcsecond).to(u.radian).value
radius=(aperture_radius * u.arcsecond).to(u.radian).value,
)
inner_pixel_list = hp.query_disc(
@@ -192,7 +200,7 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (annulus_inner * u.arcsecond).to(u.radian).value
radius=(annulus_inner * u.arcsecond).to(u.radian).value,
)
outer_pixel_list = hp.query_disc(
@@ -200,36 +208,41 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (annulus_outer * u.arcsecond).to(u.radian).value
radius=(annulus_outer * u.arcsecond).to(u.radian).value,
)
annulus_pixel_list = [
item for item in outer_pixel_list if item not in inner_pixel_list
]
source_pixels = Pixel.objects.filter(
hpid__in = source_pixel_list,
survey__number__in = survey_numbers
hpid__in=source_pixel_list, survey__in=survey_numbers
)
annulus_pixels = Pixel.objects.filter(
hpid__in = annulus_pixel_list,
survey__number__in = survey_numbers
hpid__in=annulus_pixel_list, survey__in=survey_numbers
)
if not source_pixels.exists() and not annulus_pixels.exists():
return Response(
{"detail": "No pixel data for the given survey selection."},
status=status.HTTP_404_NOT_FOUND
# check contamination
contamination = (
source_pixels.filter(contaminated=True).exists()
or annulus_pixels.filter(contaminated=True).exists()
)
# exclude contaminated pixels from the background and source regions
annulus_pixels = annulus_pixels.exclude(contaminated=True)
# source_pixels = source_pixels.exclude(contaminated=True)
status_int = 0
error_message = ""
if not source_pixels.exists() or not annulus_pixels.exists():
status_int = 1 # status 1 if there are no source or bg pixels
# COMPUTE COUNTS, BACKGROUND ESTIMATE, EXPOSURE
# **************************************************************
try:
# summing counts across all surveys
N = sum(obj.counts for obj in source_pixels)
Nnpix = len(source_pixels)
@@ -240,50 +253,83 @@ class UpperLimitView(APIView):
B = Bcounts / Bnpix * Nnpix
tsum = sum(obj.exposure for obj in source_pixels)
# create a dict of exposures keyed by survey
t_by_survey = defaultdict(list)
t = tsum / Nnpix
for pixel in source_pixels:
t_by_survey[pixel.survey].append(pixel.exposure)
# create and populate a list of average exposures per survey
survey_averages = []
for survey_id, exposures in t_by_survey.items():
average_exposure = sum(exposures) / len(exposures)
survey_averages.append(average_exposure)
# sum them up across surveys
t = sum(survey_averages)
# CONSTANTS
# **************************************************************
EEF = .9 # enclosed energy fraction, .5 for hpd, .9 for w90
ECF = 4e-11 # energy conversion factor
# EEF = .9 # enclosed energy fraction, .5 for hpd, .9 for w90
# ECF = 4e-11 # energy conversion factor
EEF = 0.80091 # use values from the paper
ECF = 3.3423184e-11
# BAYESIAN IMPLEMENTATION VIA POISSON_CONF_INTERVAL
# **************************************************************
# double sided interval
low, high = poisson_conf_interval(
n=N,
background=B,
interval = 'kraft-burrows-nousek',
confidence_level=confidence_level
interval="kraft-burrows-nousek",
confidence_level=confidence_level,
)
bayesian_count_ul = high
bayesian_count_ll = low
# because poisson_conf_interval returns lists
bayesian_count_ul = high[0]
bayesian_count_ll = low[0]
bayesian_count_UL = (
sp.gammaincinv(
N + 1,
confidence_level * sp.gammaincc(N + 1, B) + sp.gammainc(N + 1, B),
)
- B
)
bayesian_rate_ul = bayesian_count_ul / t / EEF # count rate limits
bayesian_rate_ll = bayesian_count_ll / t / EEF
bayesian_rate_UL = bayesian_count_UL / t / EEF
bayesian_flux_ul = bayesian_rate_ul * ECF # flux limits
bayesian_flux_ll = bayesian_rate_ll * ECF
bayesian_flux_UL = bayesian_rate_UL * ECF
# CLASSICAL IMPLEMENTATION VIA GAMMAINCCINV
# ****************************************************************
classic_count_ul = sp.gammainccinv(N+1, 1 - confidence_level) - B
classic_count_ll = sp.gammainccinv(N, confidence_level) - B
# confidence level for the double sided interval
cl2 = (1 + confidence_level) / 2
# double sided interval
classic_count_ul = sp.gammainccinv(N + 1, 1 - cl2) - B
classic_count_ll = sp.gammainccinv(N, cl2) - B
# one sided interval
classic_count_UL = sp.gammainccinv(N + 1, 1 - confidence_level) - B
classic_count_ll = max(classic_count_ll, 0)
if not np.isfinite(classic_count_ll) or classic_count_ll < 0:
classic_count_ll = 0.0
classic_rate_ul = classic_count_ul / t / EEF # count rate limits
classic_rate_ll = classic_count_ll / t / EEF
classic_rate_UL = classic_count_UL / t / EEF
classic_flux_ul = classic_rate_ul * ECF # flux limits
classic_flux_ll = classic_rate_ll * ECF
classic_flux_UL = classic_rate_UL * ECF
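# Worked example (hypothetical numbers, our annotation): with N=5, B=2.1 and
# cl=0.90 the double-sided interval uses cl2=0.95, i.e.
# classic_count_ul = sp.gammainccinv(6, 0.05) - 2.1
# classic_count_ll = max(sp.gammainccinv(5, 0.95) - 2.1, 0)
# while the one-sided limit uses the requested level directly:
# classic_count_UL = sp.gammainccinv(6, 0.10) - 2.1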
# FLUX ESTIMATION
# ****************************************************************
@@ -291,37 +337,224 @@ class UpperLimitView(APIView):
S = N - B # counts as simply counts within aperture
# with the background estimate subtracted
CR = S / t / EEF # count rate
CR = S / t / EEF # source count rate
BR = B / t # background rate within aperture
FL = CR * ECF # conversion to flux
Flux = max(FL, 0) # flux cannot be lower than zero
# NORMALIZED BACKGROUND RATE
# ****************************************************************
# in units of ct/s/keV/arcmin^2
return Response({
'ClassicUpperLimit' : classic_count_ul,
'ClassicLowerLimit' : classic_count_ll,
'ClassicCountRateUpperLimit' : classic_rate_ul,
'ClassicCountRateLowerLimit' : classic_rate_ll,
'ClassicFluxUpperLimit' : classic_flux_ul,
'ClassicFluxLowerLimit' : classic_flux_ll,
'BayesianUpperLimit' : bayesian_count_ul,
'BayesianLowerLimit' : bayesian_count_ll,
'BayesianCountRateUpperLimit' : bayesian_rate_ul,
'BayesianCountRateLowerLimit' : bayesian_rate_ll,
'BayesianFluxUpperLimit' : bayesian_flux_ul,
'BayesianFluxLowerLimit' : bayesian_flux_ll,
'FluxEstimate' : Flux,
# 'N' : N,
# 'Nnpix' : Nnpix,
# 'Bcounts' : Bcounts,
# 'Bnpix' : Bnpix,
# 'B' : B,
# 'tsum' : tsum,
# 't' : t
}, status=status.HTTP_200_OK)
sr_to_arcmin2 = (180 / np.pi * 60) ** 2
pix_area = (
hp.nside2pixarea(nside=4096) * sr_to_arcmin2
) # nside2pixarea yields area in sr
bg_area = pix_area * Bnpix # get total bg region area
en_range = 12 - 4
NBR = Bcounts / t / en_range / bg_area
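# Units sketch (our annotation): Bcounts [ct] / t [s] / en_range [keV]
# / bg_area [arcmin^2] -> ct/s/keV/arcmin^2; en_range = 12 - 4 is presumably
# the width of the 4-12 keV band in keV.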
# handle exceptions: write error text to the response
# zero out all math results
except Exception as e:
error_message = str(e)
N = Nnpix = Bcounts = Bnpix = B = t = S = CR = BR = 0.0
Flux = 0.0
classic_count_ul = classic_count_ll = classic_count_UL = 0.0
classic_rate_ul = classic_rate_ll = classic_rate_UL = 0.0
classic_flux_ul = classic_flux_ll = classic_flux_UL = 0.0
bayesian_count_ul = bayesian_count_ll = bayesian_count_UL = 0.0
bayesian_rate_ul = bayesian_rate_ll = bayesian_rate_UL = 0.0
bayesian_flux_ul = bayesian_flux_ll = bayesian_flux_UL = 0.0
# NEARBY SOURCES CHECK
# ****************************************************************
# if map_radius == 0:
# radius_as = 5 * aperture_radius
# else:
# radius_as = map_radius * 2
radius_deg = 3 # radius_as / 3600
dec_min = max(dec - radius_deg, -90)
dec_max = min(dec + radius_deg, 90)
# cheap belt query
belt_sources = CatalogSource.objects.filter(
dec_deg__gte=dec_min, dec_deg__lte=dec_max
)
center_coord = SkyCoord(ra, dec, unit="deg")
nearby_sources = []
radius_map = {0: 0.06, 125: 0.15, 250: 0.5, 2000: 0.9, 20000: 2.5}
sorted_bounds = sorted(radius_map.keys())
# refine belt to circular region using astropy separation
for catsrc in belt_sources:
catsrc_coord = SkyCoord(catsrc.ra_deg, catsrc.dec_deg, unit="deg")
separation = center_coord.separation(catsrc_coord)
if separation.deg > radius_deg:
continue
f = catsrc.flux or 0.0
for lb in reversed(sorted_bounds):
if f >= lb:
mask_radius = radius_map[lb] * 3600
break
nearby_sources.append(
{
"srcid": catsrc.srcid,
"name": catsrc.name,
"ra_deg": catsrc.ra_deg,
"dec_deg": catsrc.dec_deg,
"pos_error": catsrc.pos_error,
"significance": catsrc.significance,
"flux": catsrc.flux,
"flux_error": catsrc.flux_error,
"catalog_name": catsrc.catalog_name,
"new_xray": catsrc.new_xray,
"source_type": catsrc.source_type,
"mask_radius_as": mask_radius,
"separation_as": separation.arcsecond,
}
)
nearby_sources.sort(key=lambda src: src["separation_as"])
# REGION IMAGE SERVING
# ****************************************************************
# default value if not specified in the query
# get hpids within a circle of radius sqrt(2) * outer annulus radius
if map_radius_value is None:
map_radius = annulus_outer * np.sqrt(2)
else:
map_radius = float(map_radius_value)
map_pixel_list = hp.query_disc(
nside=4096,
vec=src_vec,
inclusive=False,
nest=False,
radius=(map_radius * u.arcsecond).to(u.radian).value,
)
# fetch those pixels for the requested surveys
# summing counts and sorting by hpid
map_pixels_qs = (
Pixel.objects.filter(hpid__in=map_pixel_list, survey__in=survey_numbers)
.values("hpid")
.annotate(counts=Sum("counts"))
.order_by("hpid")
)
# map_pixels_qs = (
# Pixel.objects.filter(hpid__in=map_pixel_list, survey__in=survey_numbers)
# .values("hpid")
# .annotate(
# total_counts=Sum("counts"),
# max_contaminated_int=Max(Cast("contaminated", IntegerField())),
# )
# .annotate(
# contaminated=Case(
# When(max_contaminated_int=1, then=Value(True)),
# default=Value(False),
# output_field=BooleanField(),
# )
# )
# .order_by("hpid")
# )
# turn the queryset to a list
map_pixels_list = list(map_pixels_qs)
# get lists of healpix indices and count values
map_healpix_list = [d["hpid"] for d in map_pixels_list]
map_counts_list = [d["counts"] for d in map_pixels_list]
# map_contaminated_list = [d["contaminated"] for d in map_pixels_list]
# cont_dict = dict(
# Pixel.objects.filter(hpid__in=map_healpix_list, survey__in=survey_numbers)
# .values_list("hpid", "contaminated")
# .distinct()
# )
# map_contaminated_list = [cont_dict[h] for h in map_healpix_list]
# set map nside
map_nside = 4096
# set map order
map_order = "ring"
# assemble the result dict
map_dict = {
"healpix": map_healpix_list,
"counts": map_counts_list,
# "contaminated": map_contaminated_list,
"nside": map_nside,
"order": map_order,
"radius_as": map_radius,
}
# RESULT JSON
# ****************************************************************
result = {
"Status": status_int,
# 0 OK
# 1 either source or bg pixels missing
"ErrorMessage": error_message,
# frequentist limits
"ClassicUpperLimit": classic_count_ul,
"ClassicLowerLimit": classic_count_ll,
"ClassicOneSideUL": classic_count_UL,
"ClassicCountRateUpperLimit": classic_rate_ul,
"ClassicCountRateLowerLimit": classic_rate_ll,
"ClassicCountRateOneSideUL": classic_rate_UL,
"ClassicFluxUpperLimit": classic_flux_ul,
"ClassicFluxLowerLimit": classic_flux_ll,
"ClassicFluxOneSideUL": classic_flux_UL,
# bayesian limits
"BayesianUpperLimit": bayesian_count_ul,
"BayesianLowerLimit": bayesian_count_ll,
"BayesianOneSideUL": bayesian_count_UL,
"BayesianCountRateUpperLimit": bayesian_rate_ul,
"BayesianCountRateLowerLimit": bayesian_rate_ll,
"BayesianCountRateOneSideUL": bayesian_rate_UL,
"BayesianFluxUpperLimit": bayesian_flux_ul,
"BayesianFluxLowerLimit": bayesian_flux_ll,
"BayesianFluxOneSideUL": bayesian_flux_UL,
# flux 'center value' estimate
"FluxEstimate": Flux,
# raw data
"ApertureCounts": N,
"ApertureBackgroundCounts": B,
"SourceCounts": S,
"Exposure": t,
# count rates
"SourceRate": CR,
"BackgroundRate": BR,
"NormalizedBackgroundRate": NBR, # ct/s/keV/arcmin2
# contamination
"Contamination": contamination,
"NearbySources": nearby_sources,
# count map for the frontend image
"CountMap": map_dict,
}
clean = sanitize(result) # calling sanitize() to convert NaN to null
return Response(clean, status=status.HTTP_200_OK)