Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 38 additions & 15 deletions cellpack/autopack/Gradient.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,11 @@
# TODO: fix the save/restore grid
"""

import numpy
from random import random
import bisect
from random import random

import numpy

from cellpack.autopack.utils import get_distances_from_point


Expand Down Expand Up @@ -104,13 +106,12 @@ def update_ingredient_gradient(ingr, arguments):
return ingr

@staticmethod
def normalize_by_max_value(values):
    """
    Normalize values by their maximum value.

    Parameters
    ----------
    values : numpy.ndarray
        Array of values to normalize; may contain NaNs (ignored when
        computing the maximum).

    Returns
    -------
    numpy.ndarray
        ``values`` divided by their nan-ignoring maximum, or the input
        unchanged when that maximum is zero (avoids division by zero
        for an all-zero weight array).
    """
    max_value = numpy.nanmax(values)
    return (values / max_value) if max_value != 0 else values

@staticmethod
def get_combined_gradient_weight(gradient_list, gradient_weights=None):
Expand All @@ -131,10 +132,17 @@ def get_combined_gradient_weight(gradient_list, gradient_weights=None):

weight_list = numpy.zeros((len(gradient_list), len(gradient_list[0].weight)))
for i in range(len(gradient_list)):
weight_list[i] = Gradient.scale_between_0_and_1(gradient_list[i].weight)
weight_list[i] = Gradient.normalize_by_max_value(gradient_list[i].weight)

if isinstance(gradient_weights, dict):
total = sum(gradient_weights.values())
gradient_weights = [
gradient_weights.get(gradient.name, 0) / total
for gradient in gradient_list
]

combined_weight = numpy.average(weight_list, axis=0, weights=gradient_weights)
combined_weight = Gradient.scale_between_0_and_1(combined_weight)
combined_weight = Gradient.normalize_by_max_value(combined_weight)

return combined_weight

Expand All @@ -158,7 +166,7 @@ def pick_point_from_weight(weight, points):
"""
weights_to_use = numpy.take(weight, points)
weights_to_use[numpy.isnan(weights_to_use)] = 0
weights_to_use = Gradient.scale_between_0_and_1(weights_to_use)
weights_to_use = Gradient.normalize_by_max_value(weights_to_use)

point_probabilities = weights_to_use / numpy.sum(weights_to_use)

Expand Down Expand Up @@ -253,6 +261,8 @@ def build_weight_map(self, bb, master_grid_positions):
self.build_radial_weight_map(bb, master_grid_positions)
elif self.mode == "surface":
self.build_surface_distance_weight_map()
elif self.mode == "uniform":
self.build_uniform_weight_map(master_grid_positions)

def get_gauss_weights(self, number_of_points, degree=5):
"""
Expand Down Expand Up @@ -317,9 +327,16 @@ def build_axis_weight_map(self, bb, master_grid_positions):
self.distances = numpy.abs((master_grid_positions[:, ind] - mini))
self.set_weights_by_mode()

def build_uniform_weight_map(self, master_grid_positions):
    """
    Assign equal weight to every grid position.

    Every position gets a distance of zero and a weight of one, so all
    positions are equally likely to be picked.
    """
    num_positions = len(master_grid_positions)
    self.distances = numpy.zeros(num_positions, dtype=numpy.uint8)
    self.weight = numpy.ones(num_positions)

def set_weights_by_mode(self):

self.scaled_distances = Gradient.scale_between_0_and_1(self.distances)
self.scaled_distances = Gradient.normalize_by_max_value(self.distances)

if (numpy.nanmax(self.scaled_distances) > 1.0) or (
numpy.nanmin(self.scaled_distances) < 0.0
Expand All @@ -342,11 +359,17 @@ def set_weights_by_mode(self):
"power"
]
elif self.weight_mode == "exponential":
self.weight = numpy.exp(
-self.scaled_distances / self.weight_mode_settings["decay_length"]
)
if self.weight_mode_settings["decay_length"] == 0:
self.weight = numpy.ones(len(self.scaled_distances))
else:
self.weight = numpy.exp(
-self.scaled_distances / self.weight_mode_settings["decay_length"]
)
else:
raise ValueError(f"Unknown weight mode: {self.weight_mode}")

# normalize the weight
self.weight = Gradient.scale_between_0_and_1(self.weight)
self.weight = Gradient.normalize_by_max_value(self.weight)

if (numpy.nanmax(self.weight) > 1.0) or (numpy.nanmin(self.weight) < 0.0):
raise ValueError(
Expand Down Expand Up @@ -491,7 +514,7 @@ def create_voxelization(self, image_writer):
)
if channel_values is None:
continue
normalized_values = Gradient.scale_between_0_and_1(channel_values)
normalized_values = Gradient.normalize_by_max_value(channel_values)
reshaped_values = numpy.reshape(
normalized_values, image_writer.image_size, order="F"
)
Expand Down
68 changes: 47 additions & 21 deletions cellpack/autopack/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,27 +150,53 @@ def checkPath():
log.error(str(autopack_path_pref_file) + "file is not found")
checkPath()

doit = False
if os.path.isfile(autopack_user_path_pref_file):
f = open(autopack_user_path_pref_file, "r")
doit = True
elif os.path.isfile(autopack_path_pref_file):
f = open(autopack_path_pref_file, "r")
doit = True
if doit:
log.info(f"autopack_path_pref_file {autopack_path_pref_file}")
pref_path = json.load(f)
f.close()
if "autoPACKserver" not in pref_path:
log.warning(f"problem with autopack_path_pref_file {autopack_path_pref_file}")
else:
autoPACKserver = pref_path["autoPACKserver"]
if "filespath" in pref_path:
if pref_path["filespath"] != "default":
filespath = pref_path["filespath"]
if "autopackdir" in pref_path:
if pref_path["autopackdir"] != "default":
autopackdir = pref_path["autopackdir"]

def load_path_preferences():
    """Load path preferences from user or default preference files.

    Reads the first available preference file (the user file takes
    precedence over the packaged default) and updates the module-level
    ``autoPACKserver``, ``filespath`` and ``autopackdir`` globals from
    its JSON contents. Missing files, empty files, malformed JSON and
    unexpected structures are logged and otherwise ignored.
    """
    global autoPACKserver, filespath, autopackdir

    # User preferences win over the packaged defaults.
    candidates = (autopack_user_path_pref_file, autopack_path_pref_file)
    pref_file = next((p for p in candidates if os.path.isfile(p)), None)

    if pref_file is None:
        log.warning("No preference files found")
        return

    try:
        with open(pref_file, "r") as f:
            content = f.read().strip()

        if not content:
            log.warning(f"Preference file {pref_file} is empty")
            return

        pref_path = json.loads(content)

        if not isinstance(pref_path, dict):
            log.warning(f"Invalid preference file format in {pref_file}")
            return

        if "autoPACKserver" in pref_path:
            autoPACKserver = pref_path["autoPACKserver"]
        else:
            log.warning(f"Missing 'autoPACKserver' key in {pref_file}")

        # "default" is a sentinel meaning "keep the built-in value".
        if pref_path.get("filespath", "default") != "default":
            filespath = pref_path["filespath"]

        if pref_path.get("autopackdir", "default") != "default":
            autopackdir = pref_path["autopackdir"]

    except json.JSONDecodeError as e:
        log.error(f"Failed to parse JSON in {pref_file}: {e}")
    except Exception as e:
        log.error(f"Error loading preferences from {pref_file}: {e}")


load_path_preferences()


REPLACE_PATH = {
Expand Down
2 changes: 1 addition & 1 deletion cellpack/autopack/ingredient/Ingredient.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ def validate_ingredient_info(ingredient_info):
if isinstance(ingredient_info["gradient"], list):
if "gradient_weights" in ingredient_info:
# check if gradient_weights are missing
if not isinstance(ingredient_info["gradient_weights"], list):
if not isinstance(ingredient_info["gradient_weights"], list | dict):
raise Exception(
f"Invalid gradient weights for ingredient {ingredient_info['name']}"
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class GradientModes(MetaEnum):
VECTOR = "vector"
RADIAL = "radial"
SURFACE = "surface"
UNIFORM = "uniform"


class WeightModes(MetaEnum):
Expand Down
7 changes: 5 additions & 2 deletions cellpack/autopack/upy/simularium/simularium_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,8 +347,11 @@ def add_grid_data_to_scene(self, incoming_name, positions, values, radius=0.5):

positions, values = self.sort_values(positions, values)

normalized_values = (values - np.min(values)) / (
np.max(values) - np.min(values)
max_value = np.nanmax(values)
min_value = np.nanmin(values)
value_range = max_value - min_value
normalized_values = (
(values - min_value) / value_range if value_range != 0 else values
)
colormap = matplotlib.cm.Reds(normalized_values)

Expand Down
13 changes: 10 additions & 3 deletions cellpack/autopack/validation/recipe_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ class GradientMode(str, Enum):
VECTOR = "vector"
RADIAL = "radial"
SURFACE = "surface"
UNIFORM = "uniform"


class WeightMode(str, Enum):
Expand Down Expand Up @@ -134,7 +135,7 @@ class WeightModeOptions(str, Enum):


class WeightModeSettings(BaseModel):
    # decay_length may be zero (treated as uniform weighting by the
    # exponential weight mode), so the bound is ge=0 rather than gt=0.
    decay_length: Optional[float] = Field(None, ge=0)
    power: Optional[float] = Field(None, gt=0)


Expand Down Expand Up @@ -284,7 +285,7 @@ class RecipeObject(BaseModel):
partners: Optional[Union[List[Partner], Dict[str, Any]]] = None
# Gradient field supports multiple formats:
# - str: Simple reference to gradient name (standard format)
# - List[str]: List of gradient names (for multiple gradients)
# - List[str] OR dict[str, dict[str, Any]]: List of gradient names (for multiple gradients)
# - RecipeGradient: Full gradient definition (for unnested Firebase recipes)
# - List[RecipeGradient]: List of full gradient definitions (for unnested Firebase recipes)
#
Expand All @@ -294,7 +295,13 @@ class RecipeObject(BaseModel):
# Unnested Firebase: {"name": "gradient_name", "mode": "surface", ...}
# Converted Firebase list: [{"name": "grad1", "mode": "X"}, {"name": "grad2", "mode": "Y"}]
gradient: Optional[
Union[str, List[str], "RecipeGradient", List["RecipeGradient"]]
Union[
str,
list[str],
dict[str, dict[str, Any]],
"RecipeGradient",
list["RecipeGradient"],
]
] = None
weight: Optional[float] = Field(None, ge=0)
is_attractor: Optional[bool] = None
Expand Down
5 changes: 4 additions & 1 deletion cellpack/autopack/writers/ImageWriter.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
import logging
from pathlib import Path

import numpy
from bioio_ome_tiff.writers import OmeTiffWriter
from scipy.ndimage import convolve

log = logging.getLogger(__name__)

"""
ImageWriter provides a class to export cellpack packings as tiff images
"""
Expand Down Expand Up @@ -209,7 +212,7 @@ def export_image(self):
"""
Saves the results as a tiff file
"""
print(f"Exporting image to {self.output_path}")
log.debug(f"Exporting image to {self.output_path}")
(
concatenated_image,
channel_names,
Expand Down
5 changes: 3 additions & 2 deletions cellpack/autopack/writers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,13 @@

import json
import os
import numpy
from collections import OrderedDict

import numpy

import cellpack.autopack.transformation as tr
from cellpack import autopack
from cellpack.autopack.ingredient.grow import ActinIngredient, GrowIngredient
import cellpack.autopack.transformation as tr


def updatePositionsRadii(ingr):
Expand Down
8 changes: 5 additions & 3 deletions cellpack/bin/validate.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import logging
import logging.config
import fire
from pathlib import Path

from cellpack.autopack.loaders.recipe_loader import RecipeLoader
import fire

from cellpack.autopack.interface_objects.database_ids import DATABASE_IDS
from cellpack.autopack.loaders.recipe_loader import RecipeLoader

###############################################################################
log_file_path = Path(__file__).parent.parent / "logging.conf"
Expand Down Expand Up @@ -35,7 +36,8 @@ def validate(recipe_path):
"Remote database not initialized. Please set up credentials for the database."
)
log.error(
"See: https://github.com/mesoscope/cellpack?tab=readme-ov-file#introduction-to-remote-databases"
"See: https://github.com/mesoscope/cellpack?tab=readme-ov-file"
"#introduction-to-remote-databases"
)
else:
log.error(f"Error loading recipe: {e}")
Expand Down
Loading