code formatting

Никита Тырин 2025-05-19 15:09:04 +03:00
parent cf3213a0f9
commit 26f848d274
6 changed files with 373 additions and 417 deletions

uplim/management/commands/load_survey.py

@@ -1,22 +1,22 @@
# uplim/management/commands/load_survey.py
import numpy as np
from astropy.io import fits
from itertools import islice
from datetime import datetime
from django.core.management.base import BaseCommand
from django.db import transaction
from uplim.models import Pixel
from django.db.models import Max
from itertools import islice
from datetime import datetime
# DEFINE BATCH SIZE AND BATCH
# **************************************************************
# BATCH_SIZE = 1000000
def batch(iterable, size):
"""
Generator that yields successive chunks of size 'size' from 'iterable'.
@@ -29,8 +29,6 @@ def batch(iterable, size):
yield chunk
class Command(BaseCommand):
help = "Process FITS files and store the data in the database"
@@ -39,40 +37,33 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--counts',
type=str,
required=True,
help='Path of the counts file'
"--counts", type=str, required=True, help="Path of the counts file"
)
parser.add_argument(
'--exposure',
type=str,
required=True,
help='Path of the exposure file'
"--exposure", type=str, required=True, help="Path of the exposure file"
)
parser.add_argument(
'--survey_number',
"--survey_number",
type=int,
required=True,
help='Integer ID of the survey being read'
help="Integer ID of the survey being read",
)
parser.add_argument(
'--batch_size',
"--batch_size",
type=int,
default=1000,
help='Integer number of pixels to be inserted into the database at once'
help="Integer number of pixels to be inserted into the database at once",
)
def handle(self, *args, **options):
# GET FILENAMES FROM ARGUMENTS
# **************************************************************
counts_file = options['counts']
exposure_file = options['exposure']
survey_number = options['survey_number']
BATCH_SIZE = options['batch_size']
counts_file = options["counts"]
exposure_file = options["exposure"]
survey_number = options["survey_number"]
BATCH_SIZE = options["batch_size"]
self.stdout.write(f"\nCounts file:\t{counts_file}")
self.stdout.write(f"Exposure file:\t{exposure_file}")
@@ -87,7 +78,6 @@ class Command(BaseCommand):
counts_data = counts_map.ravel()
with fits.open(exposure_file) as hdul:
column_name = "T"
@@ -104,7 +94,9 @@ class Command(BaseCommand):
total_pixels = counts_data.shape[0]
self.stdout.write(f"\nTotal pixels to insert:\t{total_pixels}")
assert counts_data.shape == exposure_data.shape, "Counts and exposure maps must have the same shape"
assert (
counts_data.shape == exposure_data.shape
), "Counts and exposure maps must have the same shape"
# CREATE THE SURVEY IF IT DOES NOT EXIST
# **************************************************************
@@ -122,27 +114,24 @@ class Command(BaseCommand):
# **************************************************************
last_hpid = (
Pixel.objects
.filter(survey=survey_number)
.aggregate(max_hpid=Max('hpid'))['max_hpid']
Pixel.objects.filter(survey=survey_number).aggregate(max_hpid=Max("hpid"))[
"max_hpid"
]
or -1
)
start_index = last_hpid + 1
pixel_generator = (
Pixel(
hpid=i,
counts=int(count),
exposure=float(exposure),
survey=survey_number
survey=survey_number,
)
for i, (count, exposure) in enumerate(zip(counts_data, exposure_data))
if i >= start_index
)
total_inserted = start_index
# Process in batches
for pixel_batch in batch(pixel_generator, BATCH_SIZE):
@@ -151,8 +140,6 @@ class Command(BaseCommand):
total_inserted += len(pixel_batch)
percentage = total_inserted / total_pixels * 100
timestamp = datetime.now().strftime("%H:%M:%S")
self.stdout.write(
f"[{timestamp}] {percentage:.2f}% inserted"
)
self.stdout.write(f"[{timestamp}] {percentage:.2f}% inserted")
self.stdout.write(f"Inserted a total of {total_inserted} pixels.")

View File

@@ -7,20 +7,20 @@
from django.core.management.base import BaseCommand
from django.db import transaction
from uplim.models import Pixel, CatalogSource
import pandas as pd
import healpy as hp
import numpy as np
from astropy.coordinates import SkyCoord
from uplim.models import Pixel, CatalogSource
from itertools import islice
from datetime import datetime
BATCH_SIZE = 900
def batch(iterable, size):
iterable = iter(iterable)
while True:
@@ -39,10 +39,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'--catalog',
type=str,
required=False,
help='Path to the catalog.dat file'
"--catalog", type=str, required=False, help="Path to the catalog.dat file"
)
# parser.add_argument(
@@ -53,10 +50,10 @@ class Command(BaseCommand):
# )
parser.add_argument(
'--reset',
action='store_true',
"--reset",
action="store_true",
default=False,
help='Reset the contamination flag across all pixels back to False.'
help="Reset the contamination flag across all pixels back to False.",
)
def handle(self, *args, **options):
@@ -64,7 +61,7 @@ class Command(BaseCommand):
# RESET BEHAVIOR: SET CONTAMINATION FLAG TO FALSE FOR ALL PIXELS
# **************************************************************
if options['reset']:
if options["reset"]:
self.stdout.write("Resetting the contamination flag...")
@@ -73,11 +70,11 @@ class Command(BaseCommand):
self.stdout.write("Done")
return
if not options['catalog']:
if not options["catalog"]:
self.stdout.write("No catalog file provided, exiting")
return
catalog_file = options['catalog']
catalog_file = options["catalog"]
self.stdout.write(f"Catalog file:\t{catalog_file}")
@@ -96,60 +93,67 @@ class Command(BaseCommand):
(77, 89), # e_Flux (78-89)
(90, 118), # CName (91-118)
(119, 120), # NewXray (120)
(121, 134) # Type (122-134)
(121, 134), # Type (122-134)
]
# Define column names
colnames = [
"SrcID", "Name", "RAdeg", "DEdeg", "ePos", "Signi", "Flux",
"e_Flux", "CName", "NewXray", "Type"
"SrcID",
"Name",
"RAdeg",
"DEdeg",
"ePos",
"Signi",
"Flux",
"e_Flux",
"CName",
"NewXray",
"Type",
]
# Read the file using the fixed-width format
catalog = pd.read_fwf(catalog_file, colspecs=colspecs, names=colnames)
for col in ['Name', 'CName', 'Type']:
catalog[col] = catalog[col].fillna('')
for col in ["Name", "CName", "Type"]:
catalog[col] = catalog[col].fillna("")
self.stdout.write(str(catalog.head()))
# LOAD THE CATALOG INTO THE DATABASE
# **********************************
existing_srcids = set(
CatalogSource.objects.values_list('srcid', flat=True)
)
existing_srcids = set(CatalogSource.objects.values_list("srcid", flat=True))
to_create = []
for _, row in catalog.iterrows():
srcid = int(row['SrcID'])
srcid = int(row["SrcID"])
if srcid in existing_srcids:
continue
to_create.append(
CatalogSource(
srcid=srcid,
name = row['Name'].strip(),
ra_deg = float(row['RAdeg']),
dec_deg = float(row['DEdeg']),
pos_error = float(row['ePos']),
significance = float(row['Signi']),
flux = float(row['Flux']),
flux_error = float(row['e_Flux']),
catalog_name = row['CName'].strip(),
new_xray = bool(int(row['NewXray'])),
source_type = row['Type'].strip()
name=row["Name"].strip(),
ra_deg=float(row["RAdeg"]),
dec_deg=float(row["DEdeg"]),
pos_error=float(row["ePos"]),
significance=float(row["Signi"]),
flux=float(row["Flux"]),
flux_error=float(row["e_Flux"]),
catalog_name=row["CName"].strip(),
new_xray=bool(int(row["NewXray"])),
source_type=row["Type"].strip(),
)
)
if to_create:
self.stdout.write(f'Inserting {len(to_create)} new catalog rows.')
self.stdout.write(f"Inserting {len(to_create)} new catalog rows.")
for chunk in batch(to_create, BATCH_SIZE):
CatalogSource.objects.bulk_create(chunk, ignore_conflicts=True)
self.stdout.write('Catalog update complete.')
self.stdout.write("Catalog update complete.")
else:
self.stdout.write('All catalog rows already exist in the database.')
self.stdout.write("All catalog rows already exist in the database.")
# hard coded nside and flux-radius mapping
# maybe change that
@@ -158,27 +162,31 @@ class Command(BaseCommand):
npix = hp.nside2npix(nside)
flux_bins = [0, 125, 250, 2000, 20000, np.inf] # define bin edges
mask_radii_deg = [ 0.06, 0.15, 0.5, 0.9, 2.5 ] # corresponding mask radii in degrees
mask_radii_deg = [
0.06,
0.15,
0.5,
0.9,
2.5,
] # corresponding mask radii in degrees
# Convert mask radii from degrees to radians (required by query_disc)
mask_radii = [np.radians(r) for r in mask_radii_deg]
# Use pandas.cut to assign each source a flux-bin index (0-4, one per bin)
catalog['flux_bin'] = pd.cut(catalog['Flux'], bins=flux_bins, labels=False)
catalog["flux_bin"] = pd.cut(catalog["Flux"], bins=flux_bins, labels=False)
# manually add and change some sources
manual_additions = pd.DataFrame(
[
{'RAdeg' : 279.9804336, 'DEdeg' : 5.0669542, 'flux_bin' : 3},
{'RAdeg' : 266.5173685, 'DEdeg' : -29.1252321, 'flux_bin' : 3},
{"RAdeg": 279.9804336, "DEdeg": 5.0669542, "flux_bin": 3},
{"RAdeg": 266.5173685, "DEdeg": -29.1252321, "flux_bin": 3},
]
)
catalog = pd.concat([catalog, manual_additions], ignore_index=True)
catalog.loc[catalog['SrcID'] == 1101, 'flux_bin'] = 2
catalog.loc[catalog["SrcID"] == 1101, "flux_bin"] = 2
mask_array = np.ones(npix, dtype=bool)
@@ -189,17 +197,15 @@ class Command(BaseCommand):
# process each source in the catalog
for _, row in catalog.iterrows():
ra = row['RAdeg']
dec = row['DEdeg']
ra = row["RAdeg"]
dec = row["DEdeg"]
src_coord = SkyCoord(
ra, dec, unit = 'deg', frame = 'icrs'
)
src_coord = SkyCoord(ra, dec, unit="deg", frame="icrs")
gal = src_coord.galactic
ra, dec = gal.l.deg, gal.b.deg
flux_bin = row['flux_bin'] # 0, 1, or 2
flux_bin = row["flux_bin"] # 0, 1, or 2
# Get the corresponding mask radius (in radians) for this flux bin
radius = mask_radii[flux_bin]
@@ -217,8 +223,6 @@ class Command(BaseCommand):
# Add the pixel indices to our set of masked pixels
masked_pixels_set.update(pix_indices)
# Convert the set of masked pixels to a sorted list.
masked_pixels_list = sorted(list(masked_pixels_set))
@@ -226,15 +230,13 @@ class Command(BaseCommand):
self.stdout.write("\nList ready, updating the database...")
if not masked_pixels_list:
self.stdout.write("No pixels marked as contaminated, exiting.")
return
total = len(masked_pixels_list)
updated = 0
self.stdout.write(f'\nUpdating contaminated flag in batches of {BATCH_SIZE}')
self.stdout.write(f"\nUpdating contaminated flag in batches of {BATCH_SIZE}")
for chunk in batch(masked_pixels_list, BATCH_SIZE):
with transaction.atomic():
@@ -244,6 +246,8 @@ class Command(BaseCommand):
percentage = updated / total * 100
timestamp = datetime.now().strftime("%H:%M:%S")
self.stdout.write(f'[{timestamp}] {updated}/{total} ({percentage:.1f}%) updated')
self.stdout.write(
f"[{timestamp}] {updated}/{total} ({percentage:.1f}%) updated"
)
self.stdout.write(f'\n Marked {updated} pixels as contaminated.')
self.stdout.write(f"\n Marked {updated} pixels as contaminated.")

uplim/models.py

@@ -4,8 +4,6 @@ from django.db import models
from django.db.models import UniqueConstraint
class Pixel(models.Model):
# id = models.AutoField(primary_key=True) # ~200 million pixels for a 4096 survey
@@ -27,8 +25,6 @@ class Pixel(models.Model):
return f"Pixel {self.id} hpid {self.hpid} (Survey {self.survey.number})"
class CatalogSource(models.Model):
srcid = models.SmallIntegerField(primary_key=True)
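The hunk shows only the first line of CatalogSource. A hedged reconstruction of the remaining fields, as implied by the catalog loader above (field types, max_length values, and options are assumptions; the lengths follow the catalog's fixed-width columns):

class CatalogSource(models.Model):
    srcid = models.SmallIntegerField(primary_key=True)
    name = models.CharField(max_length=21, blank=True)
    ra_deg = models.FloatField()
    dec_deg = models.FloatField()
    pos_error = models.FloatField()
    significance = models.FloatField()
    flux = models.FloatField()
    flux_error = models.FloatField()
    catalog_name = models.CharField(max_length=28, blank=True)
    new_xray = models.BooleanField(default=False)
    source_type = models.CharField(max_length=13, blank=True)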

uplim/serializers.py

@@ -1,8 +1,3 @@
# uplim/serializers.py
from rest_framework import serializers
from uplim.models import Pixel
# class PixelSerializer(serializers.ModelSerializer):
# class Meta:
# model = Pixel
# fields = ('hpid', 'counts', 'exposure', 'contaminated')

uplim/urls.py

@@ -5,6 +5,6 @@ from .views import PixelAggregateView, UpperLimitView #, PixelDetailView
urlpatterns = [
# path('pixel/<int:hpid>/', PixelDetailView.as_view(), name='pixel-detail'),
path('pixel-aggregate/', PixelAggregateView.as_view(), name='pixel-aggregate'),
path('upper-limit/', UpperLimitView.as_view(), name='upper-limit'),
path("pixel-aggregate/", PixelAggregateView.as_view(), name="pixel-aggregate"),
path("upper-limit/", UpperLimitView.as_view(), name="upper-limit"),
]
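With these routes the API is queried like so (host and any URL prefix are assumptions; the parameter names come from the views below):

GET /pixel-aggregate/?pixel=123456&survey=1,3-5
GET /upper-limit/?ra=279.98&dec=5.07&cl=0.95&survey=1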

uplim/views.py

@@ -4,25 +4,24 @@
# search for pixels non-inclusively
import healpy as hp
import astropy.units as u
from astropy.coordinates import SkyCoord, Angle
import numpy as np
import scipy.special as sp
from astropy.coordinates import SkyCoord, Angle
from astropy.stats import poisson_conf_interval
from django.db.models import Sum
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.shortcuts import get_object_or_404
from uplim.models import Pixel, CatalogSource
# SANITIZE RESPONSE DATA BEFORE JSON CONVERSION FOR DEBUGGING NANS
# now NaNs are converted to 'null' beforehand
# ****************************************************************
def sanitize(obj):
if isinstance(obj, dict):
return {k: sanitize(v) for k, v in obj.items()}
@@ -52,11 +51,11 @@ def parse_survey_param(raw):
return sorted(surveys)
# PIXEL VIEW (MOSTLY FOR TESTING)
# add healpix indices into the output
# **************************************************************
class PixelAggregateView(APIView):
def get(self, request):
@@ -70,7 +69,7 @@ class PixelAggregateView(APIView):
if raw_pixel is None or raw_survey is None:
return Response(
{"detail": "Both `pixel` and `survey` parameters are required."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# FILTER THE INPUTS
@@ -80,7 +79,7 @@ class PixelAggregateView(APIView):
except ValueError:
return Response(
{"detail": "`pixel` must be an integer."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
try:
@@ -88,35 +87,25 @@ class PixelAggregateView(APIView):
except ValueError:
return Response(
{"detail": "Malformed `survey`; use N, N,M or N-M formats."},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# FILTER AND AGGREGATE
# **************************************************************
qs = Pixel.objects.filter(
hpid=hpid,
survey__in=survey_numbers
)
qs = Pixel.objects.filter(hpid=hpid, survey__in=survey_numbers)
if not qs.exists():
# no matching pixel(s) → 404
get_object_or_404(
Pixel,
hpid=hpid,
survey__in=survey_numbers
)
get_object_or_404(Pixel, hpid=hpid, survey__in=survey_numbers)
aggregates = qs.aggregate(
# pixel_hpid=hpid,
# survey_number=survey_numbers,
total_counts=Sum("counts"),
total_exposure=Sum("exposure")
total_exposure=Sum("exposure"),
)
plusdata = {
'pixel_hpid' : hpid,
'surveys' : survey_numbers
}
plusdata = {"pixel_hpid": hpid, "surveys": survey_numbers}
result = {**aggregates, **plusdata}
@@ -125,7 +114,6 @@ class PixelAggregateView(APIView):
return Response(result, status=status.HTTP_200_OK)
# UPPER LIMIT COMPUTATION VIEW
# **************************************************************
@@ -134,33 +122,34 @@ class UpperLimitView(APIView):
"""
Calculate confidence bounds from aperture photometry using classical and Bayesian methods
"""
def get(self, request):
# GET PARAMETERS FROM THE REQUEST
# **************************************************************
try:
ra = float(request.query_params.get('ra'))
dec = float(request.query_params.get('dec'))
confidence_level = float(request.query_params.get('cl'))
ra = float(request.query_params.get("ra"))
dec = float(request.query_params.get("dec"))
confidence_level = float(request.query_params.get("cl"))
except (TypeError, ValueError):
return Response(
{"error": "Invalud parameters, provide RA, DEC, and CL"},
status = status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# ── NEW: pull & parse survey selection ──
raw_survey = request.query_params.get('survey')
raw_survey = request.query_params.get("survey")
if raw_survey is None:
return Response(
{"error": "Missing required `survey` parameter (e.g. ?survey=1,3-5)"},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
try:
survey_numbers = parse_survey_param(raw_survey)
except ValueError:
return Response(
{"error": "Malformed `survey`; use formats like 1, 2-5, or 1,3-4"},
status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_400_BAD_REQUEST,
)
# hp = HEALPix(
@@ -173,9 +162,7 @@ class UpperLimitView(APIView):
# MAP ITSELF WAS MADE IN GALACTIC COORDINATES
# **************************************************************
src_coord = SkyCoord(
ra, dec, unit = 'deg', frame = 'icrs'
)
src_coord = SkyCoord(ra, dec, unit="deg", frame="icrs")
gal = src_coord.galactic
src_vec = hp.ang2vec(gal.l.deg, gal.b.deg, lonlat=True)
@@ -196,7 +183,7 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (aperture_radius * u.arcsecond).to(u.radian).value
radius=(aperture_radius * u.arcsecond).to(u.radian).value,
)
inner_pixel_list = hp.query_disc(
@@ -204,7 +191,7 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (annulus_inner * u.arcsecond).to(u.radian).value
radius=(annulus_inner * u.arcsecond).to(u.radian).value,
)
outer_pixel_list = hp.query_disc(
@@ -212,40 +199,33 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (annulus_outer * u.arcsecond).to(u.radian).value
radius=(annulus_outer * u.arcsecond).to(u.radian).value,
)
annulus_pixel_list = [
item for item in outer_pixel_list if item not in inner_pixel_list
]
source_pixels = Pixel.objects.filter(
hpid__in = source_pixel_list,
survey__in = survey_numbers
hpid__in=source_pixel_list, survey__in=survey_numbers
)
annulus_pixels = Pixel.objects.filter(
hpid__in = annulus_pixel_list,
survey__in = survey_numbers
hpid__in=annulus_pixel_list, survey__in=survey_numbers
)
# check contamination
contamination = (
source_pixels.filter(contaminated=True).exists() or
annulus_pixels.filter(contaminated=True).exists()
source_pixels.filter(contaminated=True).exists()
or annulus_pixels.filter(contaminated=True).exists()
)
if not source_pixels.exists() and not annulus_pixels.exists():
return Response(
{"detail": "No pixel data for the given survey selection."},
status=status.HTTP_404_NOT_FOUND
status=status.HTTP_404_NOT_FOUND,
)
# COMPUTE COUNTS, BACKGROUND ESTIMATE, EXPOSURE
# **************************************************************
@@ -269,18 +249,17 @@ class UpperLimitView(APIView):
# EEF = .9 # enclosed energy fraction, .5 for HPD, .9 for W90
# ECF = 4e-11 # energy conversion factor
EEF = .80091 # use values from the paper
EEF = 0.80091 # use values from the paper
ECF = 3.3423184e-11
# BAYESIAN IMPLEMENTATION VIA POISSON_CONF_INTERVAL
# **************************************************************
low, high = poisson_conf_interval(
n=N,
background=B,
interval = 'kraft-burrows-nousek',
confidence_level=confidence_level
interval="kraft-burrows-nousek",
confidence_level=confidence_level,
)
bayesian_count_ul = high
@@ -301,7 +280,6 @@ class UpperLimitView(APIView):
if not np.isfinite(classic_count_ll) or classic_count_ll < 0:
classic_count_ll = 0.0
classic_rate_ul = classic_count_ul / t / EEF # count rate limits
classic_rate_ll = classic_count_ll / t / EEF
@@ -314,7 +292,6 @@ class UpperLimitView(APIView):
S = N - B # counts as simply counts within aperture
# with the background estimate subtracted
CR = S / t / EEF # source count rate
BR = B / t # background rate within aperture
@@ -334,35 +311,33 @@ class UpperLimitView(APIView):
# cheap belt query
belt_sources = CatalogSource.objects.filter(
dec_deg__gte = dec_min,
dec_deg__lte = dec_max
dec_deg__gte=dec_min, dec_deg__lte=dec_max
)
center_coord = SkyCoord(ra, dec, unit='deg')
center_coord = SkyCoord(ra, dec, unit="deg")
nearby_sources = []
# refine belt to circular region using astropy separation
for catsrc in belt_sources:
catsrc_coord = SkyCoord(catsrc.ra_deg, catsrc.dec_deg, unit='deg')
catsrc_coord = SkyCoord(catsrc.ra_deg, catsrc.dec_deg, unit="deg")
if center_coord.separation(catsrc_coord).deg <= radius_deg:
nearby_sources.append(
{
'srcid' : catsrc.srcid,
'name' : catsrc.name,
'ra_deg' : catsrc.ra_deg,
'dec_deg' : catsrc.dec_deg,
'pos_error' : catsrc.pos_error,
'significance' : catsrc.significance,
'flux' : catsrc.flux,
'flux_error' : catsrc.flux_error,
'catalog_name' : catsrc.catalog_name,
'new_xray' : catsrc.new_xray,
'source_type' : catsrc.source_type
"srcid": catsrc.srcid,
"name": catsrc.name,
"ra_deg": catsrc.ra_deg,
"dec_deg": catsrc.dec_deg,
"pos_error": catsrc.pos_error,
"significance": catsrc.significance,
"flux": catsrc.flux,
"flux_error": catsrc.flux_error,
"catalog_name": catsrc.catalog_name,
"new_xray": catsrc.new_xray,
"source_type": catsrc.source_type,
}
)
# SQUARE REGION IMAGE SERVING
# ****************************************************************
@@ -374,40 +349,39 @@ class UpperLimitView(APIView):
vec=src_vec,
inclusive=False,
nest=False,
radius = (map_radius * u.arcsecond).to(u.radian).value
radius=(map_radius * u.arcsecond).to(u.radian).value,
)
# fetch those pixels for the requested surveys
# summing counts and sorting by hpid
map_pixels_qs = (
Pixel.objects
.filter(hpid__in = map_pixel_list, survey__in = survey_numbers)
.values('hpid')
.annotate(counts=Sum('counts'))
.order_by('hpid')
Pixel.objects.filter(hpid__in=map_pixel_list, survey__in=survey_numbers)
.values("hpid")
.annotate(counts=Sum("counts"))
.order_by("hpid")
)
# turn the queryset to a list
map_pixels_list = list(map_pixels_qs)
# get lists of healpix indices and count values
map_healpix_list = [d['hpid'] for d in map_pixels_list]
map_counts_list = [d['counts'] for d in map_pixels_list]
map_healpix_list = [d["hpid"] for d in map_pixels_list]
map_counts_list = [d["counts"] for d in map_pixels_list]
# set map nside
map_nside = 4096
# set map order
map_order = 'ring'
map_order = "ring"
# assemble the result dict
map_dict = {
'healpix' : map_healpix_list,
'counts' : map_counts_list,
'nside' : map_nside,
'order' : map_order,
'radius_as' : map_radius
"healpix": map_healpix_list,
"counts": map_counts_list,
"nside": map_nside,
"order": map_order,
"radius_as": map_radius,
}
# RESULT JSON
@@ -415,34 +389,34 @@ class UpperLimitView(APIView):
result = {
# frequentist limits
'ClassicUpperLimit' : classic_count_ul,
'ClassicLowerLimit' : classic_count_ll,
'ClassicCountRateUpperLimit' : classic_rate_ul,
'ClassicCountRateLowerLimit' : classic_rate_ll,
'ClassicFluxUpperLimit' : classic_flux_ul,
'ClassicFluxLowerLimit' : classic_flux_ll,
"ClassicUpperLimit": classic_count_ul,
"ClassicLowerLimit": classic_count_ll,
"ClassicCountRateUpperLimit": classic_rate_ul,
"ClassicCountRateLowerLimit": classic_rate_ll,
"ClassicFluxUpperLimit": classic_flux_ul,
"ClassicFluxLowerLimit": classic_flux_ll,
# bayesian limits
'BayesianUpperLimit' : bayesian_count_ul,
'BayesianLowerLimit' : bayesian_count_ll,
'BayesianCountRateUpperLimit' : bayesian_rate_ul,
'BayesianCountRateLowerLimit' : bayesian_rate_ll,
'BayesianFluxUpperLimit' : bayesian_flux_ul,
'BayesianFluxLowerLimit' : bayesian_flux_ll,
"BayesianUpperLimit": bayesian_count_ul,
"BayesianLowerLimit": bayesian_count_ll,
"BayesianCountRateUpperLimit": bayesian_rate_ul,
"BayesianCountRateLowerLimit": bayesian_rate_ll,
"BayesianFluxUpperLimit": bayesian_flux_ul,
"BayesianFluxLowerLimit": bayesian_flux_ll,
# flux 'center value' estimate
'FluxEstimate' : Flux,
"FluxEstimate": Flux,
# raw data
'ApertureCounts' : N,
'ApertureBackgroundCounts' : B,
'SourceCounts' : S,
'Exposure' : t,
"ApertureCounts": N,
"ApertureBackgroundCounts": B,
"SourceCounts": S,
"Exposure": t,
# count rates
'SourceRate' : CR,
'BackgroundRate' : BR,
"SourceRate": CR,
"BackgroundRate": BR,
# contamination
'Contamination' : contamination,
'NearbySources' : nearby_sources,
"Contamination": contamination,
"NearbySources": nearby_sources,
# count map for the frontend image
'CountMap' : map_dict
"CountMap": map_dict,
}
clean = sanitize(result) # calling sanitize() to convert NaN to null
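Two helpers referenced above appear only partially in the hunks. Hedged sketches consistent with the visible lines, the documented `survey` formats (1, 2-5, 1,3-4), and the NaN-to-null comment; everything beyond that is an assumption:

def parse_survey_param(raw):
    """Parse '1', '2-5', or '1,3-4' into a sorted list of survey numbers."""
    surveys = set()
    for part in raw.split(","):
        part = part.strip()
        if "-" in part:
            lo, hi = part.split("-", 1)                  # int() raises ValueError on bad
            surveys.update(range(int(lo), int(hi) + 1))  # input, matching the except clauses
        else:
            surveys.add(int(part))
    return sorted(surveys)

def sanitize(obj):
    """Recursively replace non-finite floats with None so the JSON stays valid."""
    if isinstance(obj, dict):
        return {k: sanitize(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [sanitize(v) for v in obj]
    if isinstance(obj, float) and not np.isfinite(obj):
        return None
    return obj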