Compare commits

...

4 Commits

SHA1 Message Date
641c9421b2 update 2025-05-08 15:01:07 +03:00
04b881b5b6 update 2025-05-08 15:00:16 +03:00
e28236aca5 update 2025-05-08 14:55:40 +03:00
84c339d556 update 2025-05-08 14:42:39 +03:00
10 changed files with 442 additions and 85 deletions

README.md

@@ -1,3 +1,48 @@
# axc_ul
ART-XC upper limits django application
## Management commands
- load_survey
  Populates the database with data for a survey in healpix format. Takes about 100 minutes for an nside=4096 map to load using sqlite. Supports resuming after interruption.
  Syntax: python manage.py load_survey --counts='*path_to_counts_map*' --exposure='*path_to_exposure_map*' --survey_number=*integer survey number*
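  The same command can also be run from Python; a minimal sketch, assuming placeholder FITS paths and survey number:

      from django.core.management import call_command

      # options mirror the Syntax line above; the paths are illustrative
      call_command(
          'load_survey',
          counts='maps/survey1_counts.fits',
          exposure='maps/survey1_exposure.fits',
          survey_number=1,
      )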
## Models (models.py)
- Survey:
  - survey number
- Pixel:
  - survey foreign key
  - hpid (healpix pixel id)
  - counts
  - exposure
  - contamination flag
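A minimal ORM sketch of how the two models relate (field and related names as defined in models.py; the survey number and pixel id are arbitrary):

    from axc_ul.models import Survey, Pixel

    survey = Survey.objects.get(number=1)
    # pixels of one survey, via the related_name on the foreign key
    clean = survey.pixels.filter(contaminated=False)
    # a single pixel of that survey by healpix id
    pix = Pixel.objects.get(survey=survey, hpid=123456)
    print(pix.counts, pix.exposure)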
## Views (views.py)
**Before public deployment, views providing direct pixel data should be altered to not include it in the response JSON.**
- PixelAggregateView:
  For testing; outputs the aggregated data across the specified surveys for a healpix pixel (example requests for both endpoints are sketched after this list).
  Returns:
  Sum of counts and exposure over the specified surveys for the specified pixel.
  URL: /api/pixel-aggregate/?pixel=123&survey=1
  (/api/pixel-aggregate/?pixel=123&survey=1,3,5)
  (/api/pixel-aggregate/?pixel=123&survey=1-5)
- UpperLimitView:
  Provides the upper limit calculation.
  Returns:
  Upper and lower limits in units of counts, count rate, and physical flux for the Bayesian and classic approaches, as well as a count rate estimate and aperture photometry data (counts in the aperture, background estimate, etc.).
  URL: /api/upper-limit/?ra=10.0&dec=10.0&cl=.95&survey=1 (or a list 1,3,5, or a hyphenated range 1-5).
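Illustrative client calls against a local development server, assuming the third-party requests package (host, port, and parameter values are placeholders; the response keys follow views.py):

    import requests

    # aggregate counts and exposure for one pixel over surveys 1-5
    r = requests.get('http://localhost:8000/api/pixel-aggregate/',
                     params={'pixel': 123, 'survey': '1-5'})
    print(r.json())   # e.g. {'total_counts': ..., 'total_exposure': ...}

    # upper limits at RA=10, Dec=10 for a 90% confidence level over surveys 1 and 3
    r = requests.get('http://localhost:8000/api/upper-limit/',
                     params={'ra': 10.0, 'dec': 10.0, 'cl': 0.9, 'survey': '1,3'})
    print(r.json())   # Classic*/Bayesian* limits plus FluxEstimate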

axc_ul/management/commands/load_survey.py

@@ -1,12 +1,20 @@
# axc_ul/management/commands/load_survey.py
import numpy as np
from astropy.io import fits
from django.core.management.base import BaseCommand
from django.db import transaction
from axc_ul.models import Pixel, Survey
from django.db.models import Max
from itertools import islice
from datetime import datetime

# DEFINE BATCH SIZE AND BATCH
# **************************************************************

BATCH_SIZE = 1000000

def batch(iterable, size):
@@ -21,9 +29,14 @@ def batch(iterable, size):
        yield chunk

class Command(BaseCommand):
    help = "Process FITS files and store the data in the database"

    # COMMAND LINE ARGUMENTS
    # **************************************************************
    def add_arguments(self, parser):
        parser.add_argument(
            '--counts',
@@ -37,21 +50,29 @@ class Command(BaseCommand):
            required=True,
            help='Path of the exposure file'
        )
        parser.add_argument(
            '--survey_number',
            type=int,
            required=True,
            help='Integer ID of the survey being read'
        )

    def handle(self, *args, **options):
        # GET FILENAMES FROM ARGUMENTS
        # **************************************************************
        counts_file = options['counts']
        exposure_file = options['exposure']
        survey_number = options['survey_number']

        self.stdout.write(f"\nCounts file:\t{counts_file}")
        self.stdout.write(f"Exposure file:\t{exposure_file}")

        # OPEN BOTH FILES, RAVEL EACH
        # **************************************************************
        with fits.open(counts_file) as hdul:
@@ -68,43 +89,62 @@ class Command(BaseCommand):
            exposure_data = exposure_map.ravel()

        # COMPARE DATA SHAPES, ENSURE THEY'RE THE SAME
        # **************************************************************
        self.stdout.write(f"\nCounts Data Shape:\t{counts_data.shape}")
        self.stdout.write(f"Exposure Data Shape:\t{exposure_data.shape}")

        total_pixels = counts_data.shape[0]
        self.stdout.write(f"\nTotal pixels to insert:\t{total_pixels}")

        assert counts_data.shape == exposure_data.shape, "Counts and exposure maps must have the same shape"

        # CREATE THE SURVEY IF IT DOES NOT EXIST
        # **************************************************************
        with transaction.atomic():
            survey, created = Survey.objects.get_or_create(number=survey_number)
            if created:
                self.stdout.write(f"Created a new survey instance with number: {survey.number}")
            else:
                self.stdout.write(f"Using existing survey instance with the number: {survey.number}")

        # FETCH THE LAST PROCESSED HPID AND CONTINUE FROM IT
        # **************************************************************
        last_hpid = (
            Pixel.objects
            .filter(survey=survey)
            .aggregate(max_hpid=Max('hpid'))['max_hpid']
            or -1
        )
        start_index = last_hpid + 1

        pixel_generator = (
            Pixel(
                hpid=i,
                counts=int(count),
                exposure=float(exposure),
                survey=survey
            )
            for i, (count, exposure) in enumerate(zip(counts_data, exposure_data))
            if i >= start_index
        )

        total_inserted = start_index

        # Process in batches
        for pixel_batch in batch(pixel_generator, BATCH_SIZE):
            with transaction.atomic():
                Pixel.objects.bulk_create(pixel_batch)
                total_inserted += len(pixel_batch)
                percentage = total_inserted / total_pixels * 100
                timestamp = datetime.now().strftime("%H:%M:%S")
                self.stdout.write(
                    f"[{timestamp}] {percentage:.2f}% inserted"
                )

        self.stdout.write(f"Inserted a total of {total_inserted} pixels.")

axc_ul/migrations/0007_survey_pixel_survey.py (new file)

@@ -0,0 +1,26 @@
# Generated by Django 5.1.6 on 2025-05-07 08:58

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('axc_ul', '0006_pixel_contaminated'),
    ]

    operations = [
        migrations.CreateModel(
            name='Survey',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('number', models.IntegerField(unique=True)),
            ],
        ),
        migrations.AddField(
            model_name='pixel',
            name='survey',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, related_name='pixels', to='axc_ul.survey'),
        ),
    ]

axc_ul/migrations/0008_survey_nside.py (new file)

@@ -0,0 +1,18 @@
# Generated by Django 5.1.6 on 2025-05-08 11:23

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('axc_ul', '0007_survey_pixel_survey'),
    ]

    operations = [
        migrations.AddField(
            model_name='survey',
            name='nside',
            field=models.IntegerField(default=4096),
        ),
    ]

axc_ul/migrations/0009 (new file)

@@ -0,0 +1,17 @@
# Generated by Django 5.1.6 on 2025-05-08 11:26

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('axc_ul', '0008_survey_nside'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='survey',
            name='nside',
        ),
    ]

axc_ul/models.py

@@ -1,9 +1,37 @@
# axc_ul/models.py
from django.db import models
from django.db.models import UniqueConstraint


class Survey(models.Model):
    number = models.IntegerField(unique=True)
    # nside = models.IntegerField(
    #     default=4096
    # )

    def __str__(self):
        return f"Survey {self.number}"


class Pixel(models.Model):
    id = models.AutoField(primary_key=True)
    survey = models.ForeignKey(
        Survey,
        on_delete=models.CASCADE,
        related_name='pixels',
        default=0
    )
    hpid = models.IntegerField(db_index=True)
    counts = models.IntegerField()

@@ -12,3 +40,10 @@ class Pixel(models.Model):
    contaminated = models.BooleanField(default=False)

    class Meta:
        constraints = [
            UniqueConstraint(fields=['survey', 'hpid'], name='unique_hpid_per_survey'),
        ]

    def __str__(self):
        return f"Pixel {self.id} (Survey {self.survey.number})"

requirements.txt (new file)

@@ -0,0 +1,6 @@
astropy
numpy
healpy
scipy
django
djangorestframework

axc_ul/serializers.py

@@ -1,8 +1,8 @@
# axc_ul/serializers.py
from rest_framework import serializers
from axc_ul.models import Pixel

# class PixelSerializer(serializers.ModelSerializer):
#     class Meta:
#         model = Pixel
#         fields = ('hpid', 'counts', 'exposure', 'contaminated')

axc_ul/urls.py

@@ -1,8 +1,10 @@
# axc_ul/urls.py
from django.urls import path
from .views import PixelAggregateView, UpperLimitView  #, PixelDetailView

urlpatterns = [
    #path('pixel/<int:hpid>/', PixelDetailView.as_view(), name='pixel-detail'),
    path('pixel-aggregate/', PixelAggregateView.as_view(), name='pixel-aggregate'),
    path('upper-limit/', UpperLimitView.as_view(), name='upper-limit'),
]

axc_ul/views.py

@@ -7,7 +7,9 @@ import astropy.units as u
from astropy.coordinates import SkyCoord
import scipy.special as sp
from astropy.stats import poisson_conf_interval

from django.db.models import Sum
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
@@ -15,41 +17,114 @@ from rest_framework import status
from django.shortcuts import get_object_or_404

from axc_ul.models import Pixel


# SURVEY PARAMETER PARSER
# **************************************************************

def parse_survey_param(raw: str) -> list[int]:
    surveys = set()
    for part in raw.split(","):
        if "-" in part:
            start, end = part.split("-", 1)
            surveys.update(range(int(start), int(end) + 1))
        else:
            surveys.add(int(part))
    return sorted(surveys)


# PIXEL VIEW (MOSTLY FOR TESTING)
# **************************************************************

class PixelAggregateView(APIView):

    def get(self, request):
        # GET PARAMETERS FROM THE QUERY
        # **************************************************************
        raw_pixel = request.query_params.get("pixel")
        raw_survey = request.query_params.get("survey")

        # 400 BADREQUEST
        # **************************************************************
        if raw_pixel is None or raw_survey is None:
            return Response(
                {"detail": "Both `pixel` and `survey` parameters are required."},
                status=status.HTTP_400_BAD_REQUEST
            )

        # FILTER THE INPUTS
        # **************************************************************
        try:
            hpid = int(raw_pixel)
        except ValueError:
            return Response(
                {"detail": "`pixel` must be an integer."},
                status=status.HTTP_400_BAD_REQUEST
            )
        try:
            survey_numbers = parse_survey_param(raw_survey)
        except ValueError:
            return Response(
                {"detail": "Malformed `survey`; use N, N,M or N-M formats."},
                status=status.HTTP_400_BAD_REQUEST
            )

        # FILTER AND AGGREGATE
        # **************************************************************
        qs = Pixel.objects.filter(
            hpid=hpid,
            survey__number__in=survey_numbers
        )
        if not qs.exists():
            # no matching pixel(s) → 404
            get_object_or_404(Pixel, hpid=hpid, survey__number__in=survey_numbers)

        result = qs.aggregate(
            total_counts=Sum("counts"),
            total_exposure=Sum("exposure")
        )

        # RETURN THE SUMS
        # **************************************************************
        return Response(result, status=status.HTTP_200_OK)


# class PixelDetailView(APIView):
#     """
#     API endpoint that returns the pixel data (counts, exposure, rate)
#     for a given hpid.
#     """
#     def get(self, request, hpid):
#         # Get the survey using the survey_number field.
#         # survey = get_object_or_404(Survey, number=survey_number)
#         # Get the pixel corresponding to the survey and hpid.
#         pixel = get_object_or_404(Pixel, hpid=hpid)
#         # Serialize the pixel data to JSON.
#         serializer = PixelSerializer(pixel)
#         return Response(serializer.data, status=status.HTTP_200_OK)


# UPPER LIMIT COMPUTATION VIEW
# **************************************************************

class UpperLimitView(APIView):
    """
    Calculate confidence bounds based on aperture photometry using classic and bayesian methods
    """
    def get(self, request):
        # GET PARAMETERS FROM THE REQUEST
        # **************************************************************
        try:
            ra = float(request.query_params.get('ra'))
            dec = float(request.query_params.get('dec'))
@@ -59,24 +134,48 @@ class UpperLimitView(APIView):
                {"error": "Invalid parameters, provide RA, DEC, and CL"},
                status = status.HTTP_400_BAD_REQUEST
            )

        # ── NEW: pull & parse survey selection ──
        raw_survey = request.query_params.get('survey')
        if raw_survey is None:
            return Response(
                {"error": "Missing required `survey` parameter (e.g. ?survey=1,3-5)"},
                status=status.HTTP_400_BAD_REQUEST
            )
        try:
            survey_numbers = parse_survey_param(raw_survey)
        except ValueError:
            return Response(
                {"error": "Malformed `survey`; use formats like 1, 2-5, or 1,3-4"},
                status=status.HTTP_400_BAD_REQUEST
            )

        # hp = HEALPix(
        #     nside = 4096,
        #     order = 'ring',
        #     frame = 'icrs'
        # )

        # CREATE SKYCOORD, CONVERT TO GALACTIC BECAUSE THE HEALPIX
        # MAP ITSELF WAS MADE IN GALACTIC COORDINATES
        # **************************************************************
        src_coord = SkyCoord(
            ra, dec, unit = 'deg', frame = 'icrs'
        )
        gal = src_coord.galactic
        src_vec = hp.ang2vec(gal.l.deg, gal.b.deg, lonlat = True)

        # DEFINE APERTURE AND ANNULUS RADII
        # **************************************************************
        aperture_radius = 71   # radius of the aperture in arc seconds
        # HPD ~48 arcseconds
        # 90% ~100 arcseconds
        annulus_inner = 142    # 2 * aperture_radius
        annulus_outer = 284    # 4 * aperture_radius

        # FETCH PIXEL DATA DEFINED VIA HP.QUERY_DISC (INCLUSIVE=FALSE)
        # **************************************************************
        source_pixel_list = hp.query_disc(
            nside = 4096,
@@ -108,10 +207,26 @@
        ]

        source_pixels = Pixel.objects.filter(
            hpid__in = source_pixel_list,
            survey__number__in = survey_numbers
        )

        annulus_pixels = Pixel.objects.filter(
            hpid__in = annulus_pixel_list,
            survey__number__in = survey_numbers
        )

        if not source_pixels.exists() and not annulus_pixels.exists():
            return Response(
                {"detail": "No pixel data for the given survey selection."},
                status=status.HTTP_404_NOT_FOUND
            )

        # COMPUTE COUNTS, BACKGROUND ESTIMATE, EXPOSURE
        # **************************************************************
        N = sum(obj.counts for obj in source_pixels)
@@ -127,31 +242,84 @@
        t = tsum / Nnpix

        # CONSTANTS
        # **************************************************************
        EEF = .9      # enclosed energy fraction, .5 for hpd, .9 for w90
        ECF = 4e-11   # energy conversion factor

        # BAYESIAN IMPLEMENTATION VIA POISSON_CONF_INTERVAL
        # **************************************************************
        low, high = poisson_conf_interval(
            n = N,
            background = B,
            interval = 'kraft-burrows-nousek',
            confidence_level=confidence_level
        )

        bayesian_count_ul = high
        bayesian_count_ll = low

        bayesian_rate_ul = bayesian_count_ul / t / EEF   # count rate limits
        bayesian_rate_ll = bayesian_count_ll / t / EEF

        bayesian_flux_ul = bayesian_rate_ul * ECF   # flux limits
        bayesian_flux_ll = bayesian_rate_ll * ECF

        # CLASSICAL IMPLEMENTATION VIA GAMMAINCCINV
        # **************************************************************
        classic_count_ul = sp.gammainccinv(N + 1, 1 - confidence_level) - B
        classic_count_ll = sp.gammainccinv(N, confidence_level) - B
        classic_count_ll = max(classic_count_ll, 0)

        classic_rate_ul = classic_count_ul / t / EEF   # count rate limits
        classic_rate_ll = classic_count_ll / t / EEF

        classic_flux_ul = classic_rate_ul * ECF   # flux limits
        classic_flux_ll = classic_rate_ll * ECF

        # FLUX ESTIMATION
        # **************************************************************
        S = N - B           # counts as simply counts within aperture
                            # with the background estimate subtracted
        CR = S / t / EEF    # count rate
        FL = CR * ECF       # conversion to flux
        Flux = max(FL, 0)   # flux cannot be lower than zero

        return Response({
            'ClassicUpperLimit'            : classic_count_ul,
            'ClassicLowerLimit'            : classic_count_ll,
            'ClassicCountRateUpperLimit'   : classic_rate_ul,
            'ClassicCountRateLowerLimit'   : classic_rate_ll,
            'ClassicFluxUpperLimit'        : classic_flux_ul,
            'ClassicFluxLowerLimit'        : classic_flux_ll,

            'BayesianUpperLimit'           : bayesian_count_ul,
            'BayesianLowerLimit'           : bayesian_count_ll,
            'BayesianCountRateUpperLimit'  : bayesian_rate_ul,
            'BayesianCountRateLowerLimit'  : bayesian_rate_ll,
            'BayesianFluxUpperLimit'       : bayesian_flux_ul,
            'BayesianFluxLowerLimit'       : bayesian_flux_ll,

            'FluxEstimate'                 : Flux,

            # 'N' : N,
            # 'Nnpix' : Nnpix,
            # 'Bcounts' : Bcounts,
            # 'Bnpix' : Bnpix,
            # 'B' : B,
            # 'tsum' : tsum,
            # 't' : t
        }, status=status.HTTP_200_OK)