4 changes: 3 additions & 1 deletion datalab/datalab_session/analysis/variable_star.py
@@ -45,18 +45,19 @@ def variable_star(input: dict, user: User):
continue

target_source = find_target_source(cat_hdu, target_ra, target_dec)

if target_source is None:
log.info(f"No source found matching target coordinates: RA={target_ra}, DEC={target_dec} in image {basename}")
excluded_images.append(basename)
continue

try:
mag = target_source['mag']
print(f'mag: {mag}')
magerr = target_source['magerr']
except KeyError as e:
# If mag or magerr is not present, fall back to converting flux to mag
mag, magerr = flux_to_mag(target_source['flux'], target_source['fluxerr'])
print(f'flux mag: {mag}')
flux_fallback = True
except Exception as e:
log.warning(f"Invalid magnitude or magnitude error for target in image {basename}")
@@ -69,6 +70,7 @@ def variable_star(input: dict, user: User):
'julian_date': Time(image.get("observation_date")).jd,
'observation_date': image.get("observation_date")
})
print(f'light_curve: {light_curve}')

try:
frequency, power, period, fap = calculate_period(light_curve)
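The fallback above relies on flux_to_mag, which this diff does not show. A minimal sketch of what such a helper typically does, assuming the standard instrumental-magnitude relation and simple error propagation (the project's actual implementation may differ):

import numpy as np

def flux_to_mag(flux: float, fluxerr: float) -> tuple[float, float]:
    # Instrumental magnitude from flux: mag = -2.5 * log10(flux).
    mag = -2.5 * np.log10(flux)
    # Propagating fluxerr through that formula gives ~1.0857 * fluxerr / flux.
    magerr = 2.5 / np.log(10) * (fluxerr / flux)
    return mag, magerr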
1 change: 1 addition & 0 deletions datalab/datalab_session/analysis/wcs.py
@@ -2,6 +2,7 @@
from astropy.wcs import WCS, WcsError
from django.contrib.auth.models import User


from datalab.datalab_session.exceptions import ClientAlertException
from datalab.datalab_session.utils.file_utils import get_hdu
from datalab.datalab_session.utils.filecache import FileCache
@@ -2,12 +2,14 @@
import os
import numpy as np
from astropy.io import fits
import logging

from datalab.datalab_session.utils.file_utils import create_jpgs, temp_file_manager
from datalab.datalab_session.utils.s3_utils import save_files_to_s3
from datalab.datalab_session.utils.filecache import FileCache


log = logging.getLogger()
log.setLevel(logging.INFO)
class FITSOutputHandler():
"""A class to handle FITS output files and create jpgs.

@@ -35,6 +37,7 @@ def __init__(self, cache_key: str, data: np.array, dir: str, comment: str=None,
self.primary_hdu = fits.PrimaryHDU(header=fits.Header([('DLAB_KEY', cache_key)]))
self.image_hdu = fits.CompImageHDU(data=data, header=data_header, name='SCI')
self.dir = dir
log.info(f"[DEBUG] Initializing FITSOutputHandler with header: {data_header}")

if comment: self.set_comment(comment)

@@ -45,6 +48,7 @@ def set_comment(self, comment: str):
"""Add a comment to the FITS file."""
self.primary_hdu.header.add_comment(comment)

## add arg: header and call copy wcs method
def create_and_save_data_products(self, format, index: int=None, large_jpg_path: str=None, small_jpg_path: str=None, tif_path: str=None):
"""
When you're done with the operation and would like to save the FITS file and jpgs in S3. JPGs are required, any other file is optional.
@@ -65,6 +69,7 @@ def create_and_save_data_products(self, format, index: int=None, large_jpg_path:
# Create the output FITS file
fits_output_path = fits_output_file.name
hdu_list.writeto(fits_output_path, overwrite=True)

FileCache().add_file_to_cache(fits_output_path)

# Create jpgs if not provided
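The "## add arg: header and call copy wcs method" note above points at copying WCS keywords from an input header into the output HDU. A hedged sketch of one way to do that with astropy, where copy_wcs is a hypothetical helper name rather than anything introduced by this PR:

from astropy.io import fits
from astropy.wcs import WCS

def copy_wcs(source_header: fits.Header, target_hdu: fits.CompImageHDU) -> None:
    # WCS(header) parses the celestial solution; to_header() serializes only
    # the WCS-related cards, so unrelated keywords are not carried over.
    target_hdu.header.update(WCS(source_header).to_header())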
5 changes: 3 additions & 2 deletions datalab/datalab_session/data_operations/median.py
@@ -56,16 +56,17 @@ def operate(self, submitter: User):

input_fits_list = []
for index, input in enumerate(input_list, start=1):
## Add method to inputdatahandler to get the header
input_fits_list.append(InputDataHandler(submitter, input['basename'], input['source']))
log.info(f'input fits list: {input_fits_list}')
self.set_operation_progress(Median.PROGRESS_STEPS['MEDIAN_MIDPOINT'] * (index / len(input_list)))

cropped_data, shape = crop_arrays([image.sci_data for image in input_fits_list], flatten=True)
median = np.median(cropped_data, axis=0, overwrite_input=True)
median = np.reshape(median, shape)

self.set_operation_progress(Median.PROGRESS_STEPS['MEDIAN_CALCULATION_PERCENTAGE_COMPLETION'])

output = FITSOutputHandler(self.cache_key, median, self.temp, comment).create_and_save_data_products(Format.FITS)
output = FITSOutputHandler(self.cache_key, median, self.temp, comment, data_header=input_fits_list[0].sci_hdu.header.copy()).create_and_save_data_products(Format.FITS)
log.info(f'Median output: {output}')
self.set_output(output)
self.set_operation_progress(Median.PROGRESS_STEPS['OUTPUT_PERCENTAGE_COMPLETION'])
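For context, the median step above crops every input science array to a common shape and then takes a pixel-wise median. A simplified, self-contained sketch of that pattern with NumPy; crop_arrays here is a stand-in for the project's own utility and ignores its flatten argument:

import numpy as np

def crop_arrays(arrays):
    # Crop all 2D arrays to the smallest common shape, then stack along a new axis.
    rows = min(a.shape[0] for a in arrays)
    cols = min(a.shape[1] for a in arrays)
    cropped = np.stack([a[:rows, :cols] for a in arrays])
    return cropped, (rows, cols)

images = [np.random.rand(100, 102), np.random.rand(101, 100), np.random.rand(100, 100)]
cropped, shape = crop_arrays(images)
median_image = np.median(cropped, axis=0)  # per-pixel median across the stack
assert median_image.shape == shape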
1 change: 0 additions & 1 deletion datalab/datalab_session/data_operations/normalization.py
@@ -62,7 +62,6 @@ def operate(self, submitter: User):
median = np.median(image.sci_data)
normalized_image = image.sci_data / median
comment = f'Datalab Normalization on file {input_list[index-1]["basename"]}'

output = FITSOutputHandler(f'{self.cache_key}', normalized_image, self.temp, comment, data_header=image.sci_hdu.header.copy()).create_and_save_data_products(Format.FITS, index=index)
output_files.append(output)
self.set_output(output_files)
5 changes: 3 additions & 2 deletions datalab/datalab_session/data_operations/stacking.py
@@ -61,16 +61,17 @@ def operate(self, submitter: User):

input_fits_list = []
for index, input in enumerate(input_files, start=1):
log.info(f'this is index: {index} ' f'and input: {input}')
input_fits_list.append(InputDataHandler(submitter, input['basename'], input['source']))
log.info(f'input fits list in stacking: {input_fits_list}')
self.set_operation_progress(Stack.PROGRESS_STEPS['STACKING_MIDPOINT'] * (index / len(input_files)))

cropped_data, _ = crop_arrays([image.sci_data for image in input_fits_list])
self.set_operation_progress(Stack.PROGRESS_STEPS['STACKING_PERCENTAGE_COMPLETION'])

stacked_sum = np.sum(cropped_data, axis=0)
self.set_operation_progress(Stack.PROGRESS_STEPS['STACKING_OUTPUT_PERCENTAGE_COMPLETION'])

output = FITSOutputHandler(self.cache_key, stacked_sum, self.temp, comment).create_and_save_data_products(Format.FITS)
output = FITSOutputHandler(self.cache_key, stacked_sum, self.temp, comment, data_header=input_fits_list[0].sci_hdu.header.copy()).create_and_save_data_products(Format.FITS)

log.info(f'Stacked output: {output}')
self.set_output(output)
@@ -0,0 +1,21 @@
# Generated by Django 4.2.25 on 2025-10-09 22:26

from django.db import migrations


class Migration(migrations.Migration):

dependencies = [
('datalab_session', '0002_dataoperation_cache_key'),
]

operations = [
migrations.AlterModelOptions(
name='dataoperation',
options={'ordering': ['pk']},
),
migrations.AlterModelOptions(
name='datasession',
options={'ordering': ['-modified']},
),
]
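The migration above only changes model options, so the matching models.py edit would just add Meta ordering. A hedged sketch, with class names and the modified field inferred from the migration rather than taken from this PR:

from django.db import models

class DataOperation(models.Model):
    class Meta:
        ordering = ['pk']  # mirrors AlterModelOptions for dataoperation

class DataSession(models.Model):
    modified = models.DateTimeField(auto_now=True)  # assumed; '-modified' ordering needs this field

    class Meta:
        ordering = ['-modified']  # newest-modified sessions first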
Binary file modified datalab/datalab_session/tests/test_files/median/median_1_2.fits
Binary file not shown.
1 change: 0 additions & 1 deletion pyproject.toml
@@ -16,7 +16,6 @@ drf-nested-routers = "^0.93.4"
ocs-authentication = "^0.2.2"
psycopg2-binary = "^2.9.9"
django-dramatiq = "^0.11.6"
dramatiq = "<2.0"
redis = "^5.0.1"
rabbitmq = "^0.2.0"
pika = "^1.3.2"