initial commit
management/commands/load_survey.py (new normal file, 110 lines added)
@@ -0,0 +1,110 @@
import numpy as np

from astropy.io import fits

from django.core.management.base import BaseCommand
from django.db import transaction

from axc_ul.models import Pixel

from itertools import islice

# Number of Pixel rows inserted per bulk_create() call.
BATCH_SIZE = 1000000


def batch(iterable, size):
    """
    Generator that yields successive chunks of at most 'size' items from 'iterable'.
    """
    iterable = iter(iterable)
    while True:
        chunk = list(islice(iterable, size))
        if not chunk:
            break
        yield chunk

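# For illustration: list(batch(range(5), 3)) would yield [[0, 1, 2], [3, 4]];
# the final chunk may be smaller than 'size'.
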
class Command(BaseCommand):
    help = "Process FITS files and store the data in the database"

    def add_arguments(self, parser):
        parser.add_argument(
            '--counts',
            type=str,
            required=True,
            help='Path to the counts FITS file'
        )
        parser.add_argument(
            '--exposure',
            type=str,
            required=True,
            help='Path to the exposure FITS file'
        )
        # parser.add_argument(
        #     '--survey_number',
        #     type=int,
        #     required=True,
        #     help='Integer ID of the survey being read'
        # )

    def handle(self, *args, **options):

        counts_file = options['counts']
        exposure_file = options['exposure']
        # survey_number = options['survey_number']

        self.stdout.write(f"Counts file: {counts_file}")
        self.stdout.write(f"Exposure file: {exposure_file}")

        # Read the counts map from the first extension of the FITS file and flatten it.
        with fits.open(counts_file) as hdul:
            column_name = "T"
            counts_map = hdul[1].data[column_name]
            counts_data = counts_map.ravel()

        # Read the exposure map the same way.
        with fits.open(exposure_file) as hdul:
            column_name = "T"
            exposure_map = hdul[1].data[column_name]
            exposure_data = exposure_map.ravel()

        self.stdout.write(f"Counts Data Shape: {counts_data.shape}")
        self.stdout.write(f"Exposure Data Shape: {exposure_data.shape}")

        assert counts_data.shape == exposure_data.shape, "Counts and exposure maps must have the same shape"

        # rate_data = np.divide(counts_data, exposure_data)

        # with transaction.atomic():
        #     survey, created = Survey.objects.get_or_create(number=survey_number)
        #     if created:
        #         self.stdout.write(f"Created a new survey instance with number: {survey.number}")
        #     else:
        #         self.stdout.write(f"Using existing survey instance with the number: {survey.number}")

        # Create a generator that yields Pixel objects one by one.
        pixel_generator = (
            Pixel(
                hpid=i,
                counts=int(count),
                exposure=float(exposure),
                # rate=float(rate),
                # survey=survey
            )
            for i, (count, exposure) in enumerate(zip(counts_data, exposure_data))
        )

        total_inserted = 0
        # Process the generator in batches.
        for pixel_batch in batch(pixel_generator, BATCH_SIZE):
            with transaction.atomic():
                Pixel.objects.bulk_create(pixel_batch)
            total_inserted += len(pixel_batch)
            self.stdout.write(f"Inserted {total_inserted} pixels")

        self.stdout.write(f"Inserted a total of {total_inserted} pixels.")
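Assuming this file lives under the axc_ul app (consistent with the Pixel import) and that both FITS maps expose a "T" column in their first extension, as the code above reads, the loader would be invoked roughly as follows; the file names are placeholders:

    python manage.py load_survey --counts counts_map.fits --exposure exposure_map.fits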