diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..fabcfdc2
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,37 @@
+# Distribution / packaging
+build/
+develop-eggs/
+eggs/
+.eggs/
+*.egg-info/
+.installed.cfg
+*.egg
+__pycache__
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+*.cover
+cellpack/tests/
+cellpack/bin/tests/
+
+# Documentation
+docs/
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Generated data
+out/
+*.simularium
+**/converted/*
+data/
+results/
+
+# git
+.github/
diff --git a/cellpack/autopack/DBRecipeHandler.py b/cellpack/autopack/DBRecipeHandler.py
index 08c12cc3..61c86745 100644
--- a/cellpack/autopack/DBRecipeHandler.py
+++ b/cellpack/autopack/DBRecipeHandler.py
@@ -644,7 +644,7 @@ def upload_recipe(self, recipe_meta_data, recipe_data):
         recipe_to_save["recipe_path"] = self.db.create_path("recipes", recipe_id)
         self.upload_data("recipes", recipe_to_save, recipe_id)

-    def upload_result_metadata(self, file_name, url):
+    def upload_result_metadata(self, file_name, url, job_id=None):
         """
         Upload the metadata of the result file to the database.
         """
@@ -654,7 +654,12 @@ def upload_result_metadata(self, file_name, url):
         self.db.update_or_create(
             "results",
             file_name,
-            {"user": username, "timestamp": timestamp, "url": url},
+            {
+                "user": username,
+                "timestamp": timestamp,
+                "url": url,
+                "batch_job_id": job_id,
+            },
         )
diff --git a/cellpack/autopack/__init__.py b/cellpack/autopack/__init__.py
index 3ff238da..f2ed6bb0 100755
--- a/cellpack/autopack/__init__.py
+++ b/cellpack/autopack/__init__.py
@@ -392,13 +392,15 @@ def read_text_file(filename, destination="", cache="collisionTrees", force=None)
     return sphere_data


-def load_file(filename, destination="", cache="geometries", force=None):
+def load_file(
+    filename, destination="", cache="geometries", force=None, use_docker=False
+):
     if is_remote_path(filename):
         database_name, file_path = convert_db_shortname_to_url(filename)
         # command example: `pack -r firebase:recipes/[FIREBASE-RECIPE-ID] -c [CONFIG-FILE-PATH]`
         if database_name == "firebase":
             db = DATABASE_IDS.handlers().get(database_name)
-            initialize_db = db()
+            initialize_db = db(default_db="staging") if use_docker else db()
             if not initialize_db._initialized:
                 readme_url = "https://github.com/mesoscope/cellpack?tab=readme-ov-file#introduction-to-remote-databases"
                 sys.exit(
diff --git a/cellpack/autopack/interface_objects/partners.py b/cellpack/autopack/interface_objects/partners.py
index 72cac5ac..514b82e1 100644
--- a/cellpack/autopack/interface_objects/partners.py
+++ b/cellpack/autopack/interface_objects/partners.py
@@ -60,9 +60,11 @@ def __init__(self, partners):
                 partner["name"],
                 partner["position"] if "position" in partner else [0, 0, 0],
                 partner["weight"] if "weight" in partner else weight,
-                partner["binding_probability"]
-                if "binding_probability" in partner
-                else 1.0,
+                (
+                    partner["binding_probability"]
+                    if "binding_probability" in partner
+                    else 1.0
+                ),
             )
             self.all_partners.append(partner)
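
Illustrative note on the DBRecipeHandler change above: a minimal sketch of the record that upload_result_metadata now writes to the "results" collection. All values below are made up; batch_job_id is simply None when the packing did not run as an AWS Batch job.

    # Illustrative shape of a "results" document after this change; the values
    # are hypothetical, and batch_job_id is None when not running on AWS Batch.
    result_metadata = {
        "user": "example_user",
        "timestamp": "2024-01-01T00:00:00",
        "url": "https://example.com/example_output.simularium",
        "batch_job_id": "example-aws-batch-job-id",  # None outside AWS Batch
    }
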
diff --git a/cellpack/autopack/loaders/recipe_loader.py b/cellpack/autopack/loaders/recipe_loader.py
index cf0be99b..a9e32c0a 100644
--- a/cellpack/autopack/loaders/recipe_loader.py
+++ b/cellpack/autopack/loaders/recipe_loader.py
@@ -27,7 +27,7 @@ class RecipeLoader(object):
     # TODO: add all default values here
     default_values = default_recipe_values.copy()

-    def __init__(self, input_file_path, save_converted_recipe=False):
+    def __init__(self, input_file_path, save_converted_recipe=False, use_docker=False):
         _, file_extension = os.path.splitext(input_file_path)
         self.current_version = CURRENT_VERSION
         self.file_path = input_file_path
@@ -36,7 +36,7 @@ def __init__(self, input_file_path, save_converted_recipe=False):
         self.compartment_list = []
         self.save_converted_recipe = save_converted_recipe
         autopack.CURRENT_RECIPE_PATH = os.path.dirname(self.file_path)
-        self.recipe_data = self._read()
+        self.recipe_data = self._read(use_docker=use_docker)

     @staticmethod
     def _resolve_object(key, objects):
@@ -156,8 +156,10 @@ def _migrate_version(self, old_recipe):
             f"{old_recipe['format_version']} is not a format version we support"
         )

-    def _read(self, resolve_inheritance=True):
-        new_values, database_name = autopack.load_file(self.file_path, cache="recipes")
+    def _read(self, resolve_inheritance=True, use_docker=False):
+        new_values, database_name = autopack.load_file(
+            self.file_path, cache="recipes", use_docker=use_docker
+        )
         if database_name == "firebase":
             objects, gradients, composition = DBRecipeLoader.collect_and_sort_data(
                 new_values["composition"]
diff --git a/cellpack/autopack/upy/colors.py b/cellpack/autopack/upy/colors.py
index 69d60b14..3fc2c5b3 100644
--- a/cellpack/autopack/upy/colors.py
+++ b/cellpack/autopack/upy/colors.py
@@ -16,6 +16,7 @@
     You should have received a copy of the GNU General Public License
     along with upy.  If not, see .
 """
+
 import numpy
 from math import floor
diff --git a/cellpack/autopack/upy/simularium/__init__.py b/cellpack/autopack/upy/simularium/__init__.py
index acafa855..46a2d600 100644
--- a/cellpack/autopack/upy/simularium/__init__.py
+++ b/cellpack/autopack/upy/simularium/__init__.py
@@ -16,5 +16,6 @@
     You should have received a copy of the GNU General Public License
     along with upy.  If not, see .
 """
+
 # CRITICAL_DEPENDENCIES = ['blender','c4d']
 __revision__ = "01"
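
Illustrative note on the RecipeLoader change above: a minimal usage sketch of the updated constructor, assuming a remote firebase recipe (the recipe id below is hypothetical). use_docker is forwarded through _read() to autopack.load_file(), which then initializes the staging firebase database.

    from cellpack.autopack.loaders.recipe_loader import RecipeLoader

    # Hypothetical remote recipe id; use_docker=True is forwarded to _read()
    # and on to autopack.load_file(), selecting the staging firebase database.
    loader = RecipeLoader(
        "firebase:recipes/example_recipe_id",
        save_converted_recipe=False,
        use_docker=True,
    )
    recipe_data = loader.recipe_data
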
diff --git a/cellpack/autopack/upy/simularium/simularium_helper.py b/cellpack/autopack/upy/simularium/simularium_helper.py
index bd09210b..845bec2c 100644
--- a/cellpack/autopack/upy/simularium/simularium_helper.py
+++ b/cellpack/autopack/upy/simularium/simularium_helper.py
@@ -1398,23 +1398,35 @@ def raycast_test(self, obj, start, end, length, **kw):
     def post_and_open_file(self, file_name, open_results_in_browser=True):
         simularium_file = Path(f"{file_name}.simularium")
         url = None
+        job_id = os.environ.get("AWS_BATCH_JOB_ID", None)
         file_name, url = simulariumHelper.store_result_file(
-            simularium_file, storage="aws"
+            simularium_file, storage="aws", batch_job_id=job_id
         )
         if file_name and url:
-            simulariumHelper.store_metadata(file_name, url, db="firebase")
+            simulariumHelper.store_metadata(
+                file_name, url, db="firebase", job_id=job_id
+            )
             if open_results_in_browser:
                 simulariumHelper.open_in_simularium(url)

     @staticmethod
-    def store_result_file(file_path, storage=None):
+    def store_result_file(file_path, storage=None, batch_job_id=None):
         if storage == "aws":
             handler = DATABASE_IDS.handlers().get(storage)
-            initialized_handler = handler(
-                bucket_name="cellpack-results",
-                sub_folder_name="simularium",
-                region_name="us-west-2",
-            )
+            # if batch_job_id is not None, then we are in a batch job and should use the temp bucket
+            # TODO: use cellpack-results bucket for batch jobs once we have the correct permissions
+            if batch_job_id:
+                initialized_handler = handler(
+                    bucket_name="cellpack-demo",
+                    sub_folder_name="simularium",
+                    region_name="us-west-2",
+                )
+            else:
+                initialized_handler = handler(
+                    bucket_name="cellpack-results",
+                    sub_folder_name="simularium",
+                    region_name="us-west-2",
+                )
             file_name, url = initialized_handler.save_file_and_get_url(file_path)
             if not file_name or not url:
                 db_maintainer = DBMaintenance(initialized_handler)
@@ -1424,7 +1436,7 @@ def store_result_file(file_path, storage=None):
         return file_name, url

     @staticmethod
-    def store_metadata(file_name, url, db=None):
+    def store_metadata(file_name, url, db=None, job_id=None):
         if db == "firebase":
             handler = DATABASE_IDS.handlers().get(db)
             initialized_db = handler(
@@ -1432,7 +1444,7 @@ def store_metadata(file_name, url, db=None):
         )  # default to staging for metadata uploads
         if initialized_db._initialized:
             db_uploader = DBUploader(initialized_db)
-            db_uploader.upload_result_metadata(file_name, url)
+            db_uploader.upload_result_metadata(file_name, url, job_id)
         else:
             db_maintainer = DBMaintenance(initialized_db)
             print(
diff --git a/cellpack/bin/pack.py b/cellpack/bin/pack.py
index 80d052c8..b20f187c 100644
--- a/cellpack/bin/pack.py
+++ b/cellpack/bin/pack.py
@@ -20,18 +20,19 @@
 ###############################################################################


-def pack(recipe, config_path=None, analysis_config_path=None):
+def pack(recipe, config_path=None, analysis_config_path=None, docker=False):
     """
     Initializes an autopack packing from the command line
     :param recipe: string argument, path to recipe
     :param config_path: string argument, path to packing config file
     :param analysis_config_path: string argument, path to analysis config file
+    :param docker: boolean argument, are we using docker

     :return: void
     """
     packing_config_data = ConfigLoader(config_path).config
     recipe_data = RecipeLoader(
-        recipe, packing_config_data["save_converted_recipe"]
+        recipe, packing_config_data["save_converted_recipe"], docker
     ).recipe_data
     analysis_config_data = {}
     if analysis_config_path is not None:
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index 305d0607..a66d45a3 100644
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -1,15 +1,20 @@
 #!/bin/bash -e

-# if [ -z "$recipe" ]; then
-#     echo "Required recipe parameter is missing, please include recipe in Docker run script, ie: -e r=path/to/recipe"
-#     exit
-# fi
-
-# if [ -z "$config" ]; then
-#     echo "Required config parameter is missing, please include packing config in Docker run script, ie: -e c=path/to/config"
-#     exit
-# fi
+if [ -z "$recipe" ]; then
+    echo "Required recipe parameter is missing, please include recipe in Docker run script, ie: -e recipe=path/to/recipe"
+    exit;
+else
+    echo "recipe passed in: '$recipe'"
+fi

 cd /cellpack
-# pack -r ${recipe} -c ${config}
-pack -r examples/recipes/v2/one_sphere.json -c examples/packing-configs/run.json
\ No newline at end of file
+
+if [ -z "$config" ]; then
+    echo "Config parameter not included, using default value"
+    pack -r $recipe -d
+    exit;
+else
+    echo "config passed in: '$config'"
+fi
+
+pack -r $recipe -c $config -d
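
Illustrative note on the entrypoint above: a minimal sketch of the programmatic equivalent of its final `pack -r $recipe -c $config -d` line, assuming the pack function is importable from cellpack.bin.pack. The example paths are the ones the old entrypoint hard-coded, and docker=True corresponds to the -d flag the entrypoint passes.

    from cellpack.bin.pack import pack

    # Example paths taken from the repository's examples folder; docker=True
    # mirrors the -d flag used in docker-entrypoint.sh above, which routes
    # firebase reads to the staging database via RecipeLoader.
    pack(
        recipe="examples/recipes/v2/one_sphere.json",
        config_path="examples/packing-configs/run.json",
        docker=True,
    )
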