implemented a naive approach to stacking analysis via StackedUpperLimitView
GeVgal.csv (new file, 17 lines)
@@ -0,0 +1,17 @@
Name,l,b,Dist Mpc,log(M/M⊙)
NGC0628,138.617,-45.705,10.19,10.128 ± 0.136
NGC0660,141.607,-47.347,11.57,10.098 ± 0.331
NGC1291,247.524,-57.042,9.08,10.707 ± 0.136
NGC1433,255.691,-51.195,9.04,10.070 ± 0.201
NGC1512,248.668,-48.166,11.63,10.172 ± 0.160
NGC1532,233.168,-46.584,14.26,10.528 ± 0.600
NGC2903,208.710,44.540,8.87,10.404 ± 0.136
NGC3368,234.435,57.010,10.42,10.523 ± 0.136
NGC3877,150.719,65.956,14.63,10.096 ± 0.476
NGC4192,265.434,74.960,12.68,10.371 ± 0.136
NGC4666,299.538,62.368,14.70,10.298 ± 0.136
NGC4818,305.212,54.323,11.04,10.008 ± 0.530
NGC5248,335.929,68.751,13.75,10.264 ± 0.606
NGC7331,93.722,-20.724,12.62,10.724 ± 0.327
NGC7814,106.410,-45.175,14.40,10.520 ± 0.136
PGC032861,245.103,55.513,14.45,12.827 ± 0.502
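Only the l and b columns are consumed downstream; as a spot check, converting a single row with the same astropy call the notebook below uses reproduces its printed RA/Dec (NGC2903 here):

from astropy.coordinates import SkyCoord
import astropy.units as u

# NGC2903: l = 208.710, b = 44.540 (degrees, from the table above)
c = SkyCoord(l=208.710 * u.degree, b=44.540 * u.degree, frame="galactic").icrs
print(c.ra.deg, c.dec.deg)  # ~143.04168, ~21.50183 per the notebook output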
stack_request_test.ipynb (new file, 170 lines)
@@ -0,0 +1,170 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "4cffd6c5",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "RA List: [np.float64(24.1741435187182), np.float64(25.759787769012984), np.float64(49.32806570275772), np.float64(55.50613654382996), np.float64(60.97590646798826), np.float64(63.018113741788355), np.float64(143.0416839498853), np.float64(161.69014062163393), np.float64(176.53216723085026), np.float64(183.45178780664702), np.float64(191.28607761407375), np.float64(194.20387680952962), np.float64(204.38304472867193), np.float64(339.2669893687885), np.float64(0.8130105246497964), np.float64(164.03825008738184)]\n",
      "Dec List: [np.float64(15.783869592105441), np.float64(13.645005280982105), np.float64(-41.10817408896737), np.float64(-47.22221653992977), np.float64(-43.34878852480052), np.float64(-32.87392378612486), np.float64(21.501827494459036), np.float64(11.819887017791151), np.float64(47.49512808371984), np.float64(14.900261322222237), np.float64(-0.4622668171007003), np.float64(-8.524938659400307), np.float64(8.885499320910338), np.float64(34.4157929700228), np.float64(16.14532447253357), np.float64(6.1729868291531)]\n"
     ]
    }
   ],
   "source": [
    "import csv\n",
    "from astropy.coordinates import SkyCoord\n",
    "import astropy.units as u\n",
    "\n",
    "# Initialize empty lists for RA and Dec\n",
    "ra_list = []\n",
    "dec_list = []\n",
    "\n",
    "# Define the path to your CSV file\n",
    "csv_file_path = \"GeVgal.csv\"\n",
    "\n",
    "# Open and read the CSV file\n",
    "with open(csv_file_path, 'r') as csvfile:\n",
    "    # Use csv.reader to handle the file, skipping the header row\n",
    "    csv_reader = csv.reader(csvfile)\n",
    "    next(csv_reader)  # Skip the header row\n",
    "\n",
    "    # Loop through each row in the CSV\n",
    "    for row in csv_reader:\n",
    "        try:\n",
    "            # Extract l and b, which are in the second and third columns (index 1 and 2)\n",
    "            l = float(row[1])\n",
    "            b = float(row[2])\n",
    "\n",
    "            # Create a SkyCoord object with galactic coordinates\n",
    "            galactic_coord = SkyCoord(l=l*u.degree, b=b*u.degree, frame='galactic')\n",
    "\n",
    "            # Convert to ICRS (equatorial) coordinates\n",
    "            icrs_coord = galactic_coord.icrs\n",
    "\n",
    "            # Append the RA and Dec values to the lists\n",
    "            ra_list.append(icrs_coord.ra.deg)\n",
    "            dec_list.append(icrs_coord.dec.deg)\n",
    "\n",
    "        except (ValueError, IndexError) as e:\n",
    "            # Handle potential errors if a row is malformed\n",
    "            print(f\"Skipping a malformed row: {row} - Error: {e}\")\n",
    "\n",
    "# Now, ra_list and dec_list contain the converted coordinates\n",
    "print(\"RA List:\", ra_list)\n",
    "print(\"Dec List:\", dec_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ff1d339a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Request successful!\n",
      "{\n",
      "    \"Status\": 0,\n",
      "    \"ErrorMessage\": \"\",\n",
      "    \"ClassicUpperLimit\": 36.99647944311798,\n",
      "    \"ClassicLowerLimit\": 0.0,\n",
      "    \"ClassicOneSideUL\": 33.54440898622437,\n",
      "    \"ClassicCountRateUpperLimit\": 0.022452965897451615,\n",
      "    \"ClassicCountRateLowerLimit\": 0.0,\n",
      "    \"ClassicCountRateOneSideUL\": 0.020357922763323065,\n",
      "    \"ClassicFluxUpperLimit\": 7.504496105362505e-13,\n",
      "    \"ClassicFluxLowerLimit\": 0.0,\n",
      "    \"ClassicFluxOneSideUL\": 6.804265983763352e-13,\n",
      "    \"BayesianUpperLimit\": 33.83295926177776,\n",
      "    \"BayesianLowerLimit\": 0.10792639479099073,\n",
      "    \"BayesianOneSideUL\": 33.727107645421896,\n",
      "    \"BayesianCountRateUpperLimit\": 0.020533042385357955,\n",
      "    \"BayesianCountRateLowerLimit\": 6.549995291856849e-05,\n",
      "    \"BayesianCountRateOneSideUL\": 0.020468801604397097,\n",
      "    \"BayesianFluxUpperLimit\": 6.862796537256179e-13,\n",
      "    \"BayesianFluxLowerLimit\": 2.189216978388652e-15,\n",
      "    \"BayesianFluxOneSideUL\": 6.841325222832595e-13,\n",
      "    \"FluxEstimate\": 3.2382519031831067e-13,\n",
      "    \"ApertureCounts\": 94,\n",
      "    \"ApertureBackgroundCounts\": 78.03571428571428,\n",
      "    \"SourceCounts\": 15.964285714285722,\n",
      "    \"Exposure\": 2057.325295693534,\n",
      "    \"SourceRate\": 0.009688639787230046,\n",
      "    \"BackgroundRate\": 0.03793066388142964,\n",
      "    \"NormalizedBackgroundRate\": 1.6914547063541448e-05,\n",
      "    \"Contamination\": false,\n",
      "    \"CountMap\": {\n",
      "        \"healpix\": [],\n",
      "        \"counts\": [],\n",
      "        \"exposure\": [],\n",
      "        \"nside\": 4096,\n",
      "        \"order\": \"ring\",\n",
      "        \"radius_as\": 0.0\n",
      "    }\n",
      "}\n"
     ]
    }
   ],
   "source": [
    "import requests\n",
    "import json\n",
    "\n",
    "# Define the URL of your Django API endpoint\n",
    "url = \"http://localhost:8000/api/stacked-upper-limit/\"\n",
    "\n",
    "payload = {\n",
    "    \"ra\": ra_list,  # List of RA values\n",
    "    \"dec\": dec_list,  # List of Dec values\n",
    "    \"cl\": 0.95,  # A numeric confidence level\n",
    "    \"survey\": \"1-4\",  # A string for the survey parameter\n",
    "    \"mr\": 0  # A numeric value for map_radius\n",
    "}\n",
    "\n",
    "try:\n",
    "    # Send the PUT request with the JSON payload\n",
    "    response = requests.put(url, json=payload)\n",
    "\n",
    "    # Check the response status code\n",
    "    response.raise_for_status()  # This will raise an HTTPError for bad responses (4xx or 5xx)\n",
    "\n",
    "    # Print the JSON response from the server\n",
    "    print(\"Request successful!\")\n",
    "    print(json.dumps(response.json(), indent=4))\n",
    "\n",
    "except requests.exceptions.HTTPError as err:\n",
    "    print(f\"HTTP Error: {err}\")\n",
    "    print(f\"Response body: {err.response.text}\")\n",
    "except requests.exceptions.RequestException as err:\n",
    "    print(f\"An error occurred: {err}\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv-pypy",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
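As a consistency check, the flux limits in the response above follow from the count limits via the reported Exposure and the EEF/ECF constants that appear in views.py below:

t = 2057.325295693534   # "Exposure" from the response
EEF = 0.80091           # enclosed energy fraction (views.py)
ECF = 3.3423184e-11     # energy conversion factor (views.py)

rate_UL = 33.54440898622437 / t / EEF   # "ClassicOneSideUL" as a count rate
print(rate_UL, rate_UL * ECF)           # ~0.0203579 and ~6.8043e-13, matching
                                        # ClassicCountRateOneSideUL and ClassicFluxOneSideUL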
urls.py (11 lines changed)
@@ -1,10 +1,19 @@
# uplim/urls.py

from django.urls import path
-from .views import PixelAggregateView, UpperLimitView  # , PixelDetailView
+from .views import (
+    PixelAggregateView,
+    UpperLimitView,
+    StackedUpperLimitView,
+)  # , PixelDetailView

urlpatterns = [
    # path('pixel/<int:hpid>/', PixelDetailView.as_view(), name='pixel-detail'),
    path("pixel-aggregate/", PixelAggregateView.as_view(), name="pixel-aggregate"),
    path("upper-limit/", UpperLimitView.as_view(), name="upper-limit"),
+    path(
+        "stacked-upper-limit/",
+        StackedUpperLimitView.as_view(),
+        name="stacked-upper-limit",
+    ),
]
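The notebook above targets http://localhost:8000/api/stacked-upper-limit/, so the project-level URLconf presumably mounts this app under an api/ prefix. A minimal sketch of such a mount (the api/ prefix and project module are assumptions, not part of this diff):

# project-level urls.py (hypothetical, not in this commit)
from django.urls import include, path

urlpatterns = [
    path("api/", include("uplim.urls")),
]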
views.py (506 lines changed)
@@ -24,6 +24,7 @@ from django.db.models import (
    Value,
)
from django.db.models.functions import Cast
+from django.db.models import Q
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.response import Response

@@ -147,7 +148,7 @@ class UpperLimitView(APIView):
            confidence_level = float(request.query_params.get("cl"))
        except (TypeError, ValueError):
            return Response(
-                {"error": "Invalud parameters, provide RA, DEC, and CL"},
+                {"error": "Invalid parameters, provide RA, DEC, and CL"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # pull & parse survey selection

@@ -262,7 +263,7 @@ class UpperLimitView(APIView):
        # create and populate a list of average exposures per survey
        survey_averages = []

-        for survey_id, exposures in t_by_survey.items():
+        for _, exposures in t_by_survey.items():
            average_exposure = sum(exposures) / len(exposures)
            survey_averages.append(average_exposure)

@@ -367,7 +368,7 @@ class UpperLimitView(APIView):

            error_message = str(e)

-            N = Nnpix = Bcounts = Bnpix = B = t = S = CR = BR = 0.0
+            N = Nnpix = Bcounts = Bnpix = B = t = S = CR = BR = NBR = 0.0
            Flux = 0.0
            classic_count_ul = classic_count_ll = classic_count_UL = 0.0
            classic_rate_ul = classic_rate_ll = classic_rate_UL = 0.0

@@ -460,39 +461,15 @@ class UpperLimitView(APIView):
            .order_by("hpid")
        )

-        # map_pixels_qs = (
-        #     Pixel.objects.filter(hpid__in=map_pixel_list, survey__in=survey_numbers)
-        #     .values("hpid")
-        #     .annotate(
-        #         total_counts=Sum("counts"),
-        #         max_contaminated_int=Max(Cast("contaminated", IntegerField())),
-        #     )
-        #     .annotate(
-        #         contaminated=Case(
-        #             When(max_contaminated_int=1, then=Value(True)),
-        #             default=Value(False),
-        #             output_field=BooleanField(),
-        #         )
-        #     )
-        #     .order_by("hpid")
-        # )
-
        # turn the queryset to a list
        map_pixels_list = list(map_pixels_qs)

        # get lists of healpix indices and count values
        map_healpix_list = [d["hpid"] for d in map_pixels_list]
        map_counts_list = [d["counts"] for d in map_pixels_list]
+        map_exposure_list = [d["exposure"] for d in map_pixels_list]
        # map_contaminated_list = [d["contaminated"] for d in map_pixels_list]

-        # cont_dict = dict(
-        #     Pixel.objects.filter(hpid__in=map_healpix_list, survey__in=survey_numbers)
-        #     .values_list("hpid", "contaminated")
-        #     .distinct()
-        # )
-
-        # map_contaminated_list = [cont_dict[h] for h in map_healpix_list]
-
        # set map nside
        map_nside = 4096

@@ -503,6 +480,7 @@ class UpperLimitView(APIView):
        map_dict = {
            "healpix": map_healpix_list,
            "counts": map_counts_list,
+            "exposure": map_exposure_list,
            # "contaminated": map_contaminated_list,
            "nside": map_nside,
            "order": map_order,

@@ -558,3 +536,475 @@ class UpperLimitView(APIView):
        clean = sanitize(result)  # calling sanitize() to convert NaN to null

        return Response(clean, status=status.HTTP_200_OK)


class StackedUpperLimitView(APIView):
    """
    Calculate confidence bounds based on aperture photometry, using classic
    and Bayesian methods, for a set of sources.
    """

    def put(self, request):

        data = request.data

        try:
            ra_list = data.get("ra", [])
            dec_list = data.get("dec", [])
            confidence_level = data.get("cl")

            if not isinstance(ra_list, list) or not all(
                isinstance(x, (float, int)) for x in ra_list
            ):
                raise TypeError
            if not isinstance(dec_list, list) or not all(
                isinstance(x, (float, int)) for x in dec_list
            ):
                raise TypeError
            if not isinstance(confidence_level, (float, int)):
                raise TypeError

        except TypeError:
            return Response(
                {
                    "error": "Invalid parameters, provide 'ra', 'dec' lists of numbers, and a numeric 'cl'"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        raw_survey = data.get("survey")

        if raw_survey is None:
            return Response(
                {"error": "Missing required `survey` parameter"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            survey_numbers = parse_survey_param(raw_survey)
        except ValueError:
            return Response(
                {"error": "Malformed `survey`; use formats like 1, 2-5, or 1,3-4"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        map_radius_value = data.get("mr")

        # DEFINE APERTURE AND ANNULUS RADII
        # **************************************************************

        aperture_radius = 71  # radius of the aperture in arcseconds
        # HPD ~48 arcseconds
        # 90% ~100 arcseconds
        annulus_inner = 213  # 3 * aperture_radius
        annulus_outer = 355  # 5 * aperture_radius

        # CREATE SKYCOORD, CONVERT TO GALACTIC BECAUSE THE HEALPIX
        # MAP ITSELF WAS MADE IN GALACTIC COORDINATES
        # **************************************************************

        # a list of skycoords
        src_coord_list = SkyCoord(ra_list, dec_list, unit="deg", frame="icrs")
        gal_list = src_coord_list.galactic
        # and then vectors
        src_vec_list = hp.ang2vec(gal_list.l.deg, gal_list.b.deg, lonlat=True)

        # FETCH PIXEL DATA DEFINED VIA HP.QUERY_DISC (INCLUSIVE=FALSE)
        # **************************************************************

        # first init pixel lists
        source_pixel_dict, inner_pixel_dict, outer_pixel_dict = {}, {}, {}

        # then loop over vectors appending indices to lists
        for index, src_vec in enumerate(src_vec_list):

            source_pixel_dict[index] = hp.query_disc(
                nside=4096,
                vec=src_vec,
                inclusive=False,
                nest=False,
                radius=(aperture_radius * u.arcsecond).to(u.radian).value,
            )

            inner_pixel_dict[index] = hp.query_disc(
                nside=4096,
                vec=src_vec,
                inclusive=False,
                nest=False,
                radius=(annulus_inner * u.arcsecond).to(u.radian).value,
            )

            outer_pixel_dict[index] = hp.query_disc(
                nside=4096,
                vec=src_vec,
                inclusive=False,
                nest=False,
                radius=(annulus_outer * u.arcsecond).to(u.radian).value,
            )

        # flatten dicts into lists for most of the math

        outer_pixel_list = [
            item for sublist in outer_pixel_dict.values() for item in sublist
        ]

        inner_pixel_list = [
            item for sublist in inner_pixel_dict.values() for item in sublist
        ]

        source_pixel_list = [
            item for sublist in source_pixel_dict.values() for item in sublist
        ]

        # proceed as is
        annulus_pixel_list = [
            item for item in outer_pixel_list if item not in inner_pixel_list
        ]

        source_pixels = Pixel.objects.filter(
            hpid__in=source_pixel_list, survey__in=survey_numbers
        )

        annulus_pixels = Pixel.objects.filter(
            hpid__in=annulus_pixel_list, survey__in=survey_numbers
        )

        # check contamination
        contamination = (
            source_pixels.filter(contaminated=True).exists()
            or annulus_pixels.filter(contaminated=True).exists()
        )

        # exclude contaminated pixels from the background and source regions
        annulus_pixels = annulus_pixels.exclude(contaminated=True)
        # source_pixels = source_pixels.exclude(contaminated=True)

        status_int = 0
        error_message = ""

        if not source_pixels.exists() or not annulus_pixels.exists():
            status_int = 1  # status 1 if there are no source or bg pixels

        # COMPUTE COUNTS, BACKGROUND ESTIMATE, EXPOSURE
        # **************************************************************
        try:
            # summing counts across all surveys
            N = sum(obj.counts for obj in source_pixels)

            Nnpix = len(source_pixels)

            Bcounts = sum(obj.counts for obj in annulus_pixels)

            Bnpix = len(annulus_pixels)

            B = Bcounts / Bnpix * Nnpix

            t = 0

            for source_pixel_sublist in source_pixel_dict.values():

                source_pixels = Pixel.objects.filter(
                    hpid__in=source_pixel_sublist, survey__in=survey_numbers
                )

                # create a dict of exposures keyed by survey
                t_by_survey = defaultdict(list)

                for pixel in source_pixels:
                    t_by_survey[pixel.survey].append(pixel.exposure)

                # create and populate a list of average exposures per survey
                survey_averages = []

                for _, exposures in t_by_survey.items():
                    average_exposure = sum(exposures) / len(exposures)
                    survey_averages.append(average_exposure)

                # sum them up across surveys
                t += sum(survey_averages)
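            # after the loop, t is the stacked exposure: the sum over sources of
            # each source's per-survey mean aperture exposure; every rate and
            # flux limit below divides by this single number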
            # CONSTANTS
            # **************************************************************

            # EEF = .9  # enclosed energy fraction, .5 for hpd, .9 for w90
            # ECF = 4e-11  # energy conversion factor

            EEF = 0.80091  # use values from the paper
            ECF = 3.3423184e-11

            # BAYESIAN IMPLEMENTATION VIA POISSON_CONF_INTERVAL
            # **************************************************************

            # double sided interval
            low, high = poisson_conf_interval(
                n=N,
                background=B,
                interval="kraft-burrows-nousek",
                confidence_level=confidence_level,
            )
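            # astropy's "kraft-burrows-nousek" option implements the Bayesian
            # interval of Kraft, Burrows & Nousek (1991) for N observed counts
            # over a known mean background B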
            # because poisson_conf_interval returns lists
            bayesian_count_ul = high[0]
            bayesian_count_ll = low[0]

            # compute upper limit
            bayesian_count_UL = (
                sp.gammaincinv(
                    N + 1,
                    confidence_level * sp.gammaincc(N + 1, B) + sp.gammainc(N + 1, B),
                )
                - B
            )
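            # the flat-prior posterior of the total mean is Gamma(N + 1),
            # truncated at the background B: solving
            # P(mu <= B + UL | N, mu >= B) = confidence_level gives
            # B + UL = gammaincinv(N + 1, cl * gammaincc(N + 1, B) + gammainc(N + 1, B))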
            bayesian_rate_ul = bayesian_count_ul / t / EEF  # count rate limits
            bayesian_rate_ll = bayesian_count_ll / t / EEF
            bayesian_rate_UL = bayesian_count_UL / t / EEF

            bayesian_flux_ul = bayesian_rate_ul * ECF  # flux limits
            bayesian_flux_ll = bayesian_rate_ll * ECF
            bayesian_flux_UL = bayesian_rate_UL * ECF

            # CLASSICAL IMPLEMENTATION VIA GAMMAINCCINV
            # ****************************************************************

            # confidence level for the double sided interval
            cl2 = (1 + confidence_level) / 2
            # double sided interval
            classic_count_ul = sp.gammainccinv(N + 1, 1 - cl2) - B
            classic_count_ll = sp.gammainccinv(N, cl2) - B
            # one sided interval
            classic_count_UL = sp.gammainccinv(N + 1, 1 - confidence_level) - B
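            # sp.gammaincc(N + 1, mu) equals the Poisson probability of seeing
            # N or fewer counts at mean mu, so gammainccinv(N + 1, 1 - cl)
            # returns the mean at which that probability falls to 1 - cl;
            # subtracting B converts the total-count bound to a source-count bound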
            if not np.isfinite(classic_count_ll) or classic_count_ll < 0:
                classic_count_ll = 0.0

            classic_rate_ul = classic_count_ul / t / EEF  # count rate limits
            classic_rate_ll = classic_count_ll / t / EEF
            classic_rate_UL = classic_count_UL / t / EEF

            classic_flux_ul = classic_rate_ul * ECF  # flux limits
            classic_flux_ll = classic_rate_ll * ECF
            classic_flux_UL = classic_rate_UL * ECF

            # FLUX ESTIMATION
            # ****************************************************************

            S = N - B  # counts as simply counts within aperture
            # with the background estimate subtracted

            CR = S / t / EEF  # source count rate

            BR = B / t  # background rate within aperture

            FL = CR * ECF  # conversion to flux

            Flux = max(FL, 0)  # flux cannot be lower than zero

            # NORMALIZED BACKGROUND RATE
            # ****************************************************************
            # in units of ct/s/keV/arcmin^2

            sr_to_arcmin2 = (180 / np.pi * 60) ** 2

            pix_area = (
                hp.nside2pixarea(nside=4096) * sr_to_arcmin2
            )  # nside2pixarea yields area in sr

            bg_area = pix_area * Bnpix  # get total bg region area

            en_range = 12 - 4

            NBR = Bcounts / t / en_range / bg_area
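            # unit check: Bcounts / t is ct/s; dividing by the 4-12 keV bandwidth
            # (en_range = 8 keV) and the background region area in arcmin^2 gives
            # the advertised ct/s/keV/arcmin^2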
        # handle exceptions: write error text to the response
        # zero out all math results
        except Exception as e:

            error_message = str(e)

            # NBR zeroed as well so the result dict below stays defined
            N = Nnpix = Bcounts = Bnpix = B = t = S = CR = BR = NBR = 0.0
            Flux = 0.0
            classic_count_ul = classic_count_ll = classic_count_UL = 0.0
            classic_rate_ul = classic_rate_ll = classic_rate_UL = 0.0
            classic_flux_ul = classic_flux_ll = classic_flux_UL = 0.0
            bayesian_count_ul = bayesian_count_ll = bayesian_count_UL = 0.0
            bayesian_rate_ul = bayesian_rate_ll = bayesian_rate_UL = 0.0
            bayesian_flux_ul = bayesian_flux_ll = bayesian_flux_UL = 0.0

        # NEARBY SOURCES CHECK
        # ****************************************************************
        # disabled for now, many-to-many separation computation is expensive

        # if map_radius == 0:
        #     radius_as = 5 * aperture_radius
        # else:
        #     radius_as = map_radius * 2

        # radius_deg = 3  # radius_as / 3600

        # # dec_min_list = max(dec_list - radius_deg, -90)
        # # dec_max_list = min(dec_list + radius_deg, 90)

        # dec_min_list = [max(d - radius_deg, -90) for d in dec_list]
        # dec_max_list = [min(d + radius_deg, 90) for d in dec_list]

        # # cheap belt query
        # # belt_sources = CatalogSource.objects.filter(
        # #     dec_deg__gte=dec_min, dec_deg__lte=dec_max
        # # )

        # query = Q()

        # for dec_min, dec_max in zip(dec_min_list, dec_max_list):
        #     query |= Q(dec_deg__gte=dec_min, dec_deg__lte=dec_max)

        # belt_list_sources = CatalogSource.objects.filter(query)

        # center_coord = SkyCoord(ra_list, dec_list, unit="deg")

        # nearby_sources = []

        # radius_map = {0: 0.06, 125: 0.15, 250: 0.5, 2000: 0.9, 20000: 2.5}

        # sorted_bounds = sorted(radius_map.keys())

        # # refine belt to circular region using astropy separation
        # for catsrc in belt_list_sources:
        #     catsrc_coord = SkyCoord(catsrc.ra_deg, catsrc.dec_deg, unit="deg")
        #     separation = center_coord.separation(catsrc_coord)
        #     if separation.deg > radius_deg:
        #         continue
        #     f = catsrc.flux or 0.0
        #     for lb in reversed(sorted_bounds):
        #         if f >= lb:
        #             mask_radius = radius_map[lb] * 3600
        #             break

        #     nearby_sources.append(
        #         {
        #             "srcid": catsrc.srcid,
        #             "name": catsrc.name,
        #             "ra_deg": catsrc.ra_deg,
        #             "dec_deg": catsrc.dec_deg,
        #             "pos_error": catsrc.pos_error,
        #             "significance": catsrc.significance,
        #             "flux": catsrc.flux,
        #             "flux_error": catsrc.flux_error,
        #             "catalog_name": catsrc.catalog_name,
        #             "new_xray": catsrc.new_xray,
        #             "source_type": catsrc.source_type,
        #             "mask_radius_as": mask_radius,
        #             "separation_as": separation.arcsecond,
        #         }
        #     )
        # nearby_sources.sort(key=lambda src: src["separation_as"])

        # REGION IMAGE SERVING
        # ****************************************************************
        #
        # default value if not specified in the query
        # get hpids within a circle of radius sqrt(2) * outer annulus radius
        if map_radius_value is None:
            map_radius = annulus_outer * np.sqrt(2)
        else:
            map_radius = float(map_radius_value)

        # iterate over source vectors, making maps

        # Create a single list to hold all Healpix IDs
        all_map_pixels = []

        # Iterate over source vectors and get all unique Healpix IDs
        for index, src_vec in enumerate(src_vec_list):
            pixels = hp.query_disc(
                nside=4096,
                vec=src_vec,
                inclusive=False,
                nest=False,
                radius=(map_radius * u.arcsecond).to(u.radian).value,
            )
            all_map_pixels.extend(pixels)

        # Get only unique Healpix IDs
        unique_map_pixels = list(set(all_map_pixels))

        # Perform a single database query for all unique pixels
        map_pixels_qs = (
            Pixel.objects.filter(hpid__in=unique_map_pixels, survey__in=survey_numbers)
            .values("hpid")
            .annotate(counts=Sum("counts"), exposure=Sum("exposure"))
            .order_by("hpid")
        )

        # turn the queryset to a list
        map_pixels_list = list(map_pixels_qs)

        # get lists of healpix indices and count values
        map_healpix_list = [d["hpid"] for d in map_pixels_list]
        map_counts_list = [d["counts"] for d in map_pixels_list]
        map_exposure_list = [d["exposure"] for d in map_pixels_list]
        # map_contaminated_list = [d["contaminated"] for d in map_pixels_list]

        # set map nside
        map_nside = 4096

        # set map order
        map_order = "ring"

        # assemble the result dict
        map_dict = {
            "healpix": map_healpix_list,
            "counts": map_counts_list,
            "exposure": map_exposure_list,
            # "contaminated": map_contaminated_list,
            "nside": map_nside,
            "order": map_order,
            "radius_as": map_radius,
        }

        # RESULT JSON
        # ****************************************************************

        result = {
            "Status": status_int,
            # 0 OK
            # 1 either source or bg pixels missing
            "ErrorMessage": error_message,
            # frequentist limits
            "ClassicUpperLimit": classic_count_ul,
            "ClassicLowerLimit": classic_count_ll,
            "ClassicOneSideUL": classic_count_UL,
            "ClassicCountRateUpperLimit": classic_rate_ul,
            "ClassicCountRateLowerLimit": classic_rate_ll,
            "ClassicCountRateOneSideUL": classic_rate_UL,
            "ClassicFluxUpperLimit": classic_flux_ul,
            "ClassicFluxLowerLimit": classic_flux_ll,
            "ClassicFluxOneSideUL": classic_flux_UL,
            # bayesian limits
            "BayesianUpperLimit": bayesian_count_ul,
            "BayesianLowerLimit": bayesian_count_ll,
            "BayesianOneSideUL": bayesian_count_UL,
            "BayesianCountRateUpperLimit": bayesian_rate_ul,
            "BayesianCountRateLowerLimit": bayesian_rate_ll,
            "BayesianCountRateOneSideUL": bayesian_rate_UL,
            "BayesianFluxUpperLimit": bayesian_flux_ul,
            "BayesianFluxLowerLimit": bayesian_flux_ll,
            "BayesianFluxOneSideUL": bayesian_flux_UL,
            # flux 'center value' estimate
            "FluxEstimate": Flux,
            # raw data
            "ApertureCounts": N,
            "ApertureBackgroundCounts": B,
            "SourceCounts": S,
            "Exposure": t,
            # count rates
            "SourceRate": CR,
            "BackgroundRate": BR,
            "NormalizedBackgroundRate": NBR,  # ct/s/keV/arcmin2
            # contamination
            "Contamination": contamination,
            # "NearbySources": nearby_sources,
            # count map for the frontend image
            "CountMap": map_dict,
        }

        clean = sanitize(result)  # calling sanitize() to convert NaN to null

        return Response(clean, status=status.HTTP_200_OK)