Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add upload_results as a config option #191

Merged
merged 14 commits into from
Sep 27, 2023
Merged
15 changes: 15 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,21 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) for information related to developing the
each set of changes to `main` atomic and as a side effect naturally encourages small
well defined PR's.

## Introduction to Remote Databases
### AWS S3
1. Pre-requisites
* Obtain an AWS account for AICS. Please contact the IT team or the code owner.
* Generate an `aws_access_key_id` and `aws_secret_access_key` in your AWS account.

2. Step-by-step Guide
* Download and install the [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)
* Configure AWS CLI by running `aws configure`, then enter your credentials as prompted.
* Ensure that Boto3, the AWS SDK for Python, is installed and included in the requirements section of `setup.py`.

### Firebase Firestore
1. Step-by-step Guide
* Create a Firebase project in test mode with your google account, select `firebase_admin` as the SDK. [Firebase Firestore tutorial](https://firebase.google.com/docs/firestore)
* Generate a new private key by navigating to "Project settings">"Service account" in the project's dashboard.

**MIT license**

85 changes: 85 additions & 0 deletions cellpack/autopack/AWSHandler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import logging
from pathlib import Path

import boto3
from botocore.exceptions import ClientError


class AWSHandler(object):
    """
    Handles all the AWS S3 operations
    """

    def __init__(
        self,
        bucket_name,
        sub_folder_name=None,
        region_name=None,
    ):
        # Bucket and optional key prefix that every upload is placed under.
        self.bucket_name = bucket_name
        self.folder_name = sub_folder_name
        # The Session picks up credentials from the environment / AWS config
        # (see the README section "Introduction to Remote Databases" for setup).
        session = boto3.Session()
        self.s3_client = session.client(
            "s3",
            endpoint_url=f"https://s3.{region_name}.amazonaws.com",
            region_name=region_name,
        )

    def get_aws_object_key(self, object_name):
        """
        Return the S3 object key for `object_name`, prefixed with the
        configured sub-folder name when one was provided.
        """
        if self.folder_name is not None:
            return self.folder_name + object_name
        return object_name

    def upload_file(self, file_path):
        """Upload a file to the configured S3 bucket and make it publicly readable.
        :param file_path: path of the local file to upload; its base name becomes
            the S3 object name (prefixed with the sub-folder, if any)
        :return: the file's base name if the upload succeeded, else False
        """

        file_name = Path(file_path).name

        object_name = self.get_aws_object_key(file_name)
        # Upload the file
        try:
            self.s3_client.upload_file(file_path, self.bucket_name, object_name)
            # Results are shared via public links, so grant public read access.
            self.s3_client.put_object_acl(
                ACL="public-read", Bucket=self.bucket_name, Key=object_name
            )

        except ClientError as e:
            logging.error(e)
            return False
        return file_name

    def create_presigned_url(self, object_name, expiration=3600):
        """Generate a presigned URL to share an S3 object
        :param object_name: base file name of the object (the sub-folder prefix
            is added here, so pass the bare name)
        :param expiration: Time in seconds for the presigned URL to remain valid
        :return: Presigned URL as string. If error, returns None.
        """
        object_name = self.get_aws_object_key(object_name)
        # Generate a presigned URL for the S3 object
        try:
            url = self.s3_client.generate_presigned_url(
                "get_object",
                Params={"Bucket": self.bucket_name, "Key": object_name},
                ExpiresIn=expiration,
            )
        except ClientError as e:
            logging.error(e)
            return None
        # The response contains the presigned URL
        # https://{self.bucket_name}.s3.{region}.amazonaws.com/{object_key}
        return url

    def save_file(self, file_path):
        """
        Uploads a file to S3 and returns the presigned url,
        or None if the upload failed.
        """
        file_name = self.upload_file(file_path)
        if file_name:
            return self.create_presigned_url(file_name)
        return None
40 changes: 40 additions & 0 deletions cellpack/autopack/upy/simularium/simularium_helper.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,13 @@
# -*- coding: utf-8 -*-
# standardmodule
import os
import webbrowser
from pathlib import Path

import matplotlib
import numpy as np
import trimesh
from botocore.exceptions import NoCredentialsError

from simulariumio import (
TrajectoryConverter,
Expand All @@ -19,6 +23,7 @@
from simulariumio.constants import DISPLAY_TYPE, VIZ_TYPE

from cellpack.autopack.upy import hostHelper
from cellpack.autopack.AWSHandler import AWSHandler
import collada


Expand Down Expand Up @@ -1335,6 +1340,7 @@ def writeToFile(self, file_name, bb, recipe_name, version):
spatial_units=UnitData("nm"), # nanometers
)
TrajectoryConverter(converted_data).save(file_name, False)
return file_name

def raycast(self, **kw):
intersect = False
Expand All @@ -1348,3 +1354,37 @@ def raycast(self, **kw):

def raycast_test(self, obj, start, end, length, **kw):
    # Stub: raycast testing is not implemented for this helper; returns None.
    return

def post_and_open_file(self, file_name):
    """Upload the .simularium result file to S3 and open it in the viewer."""
    simularium_file = Path(f"{file_name}.simularium")
    url = None
    try:
        url = simulariumHelper.store_results_to_s3(simularium_file)
    except NoCredentialsError:
        # Point the user at the setup instructions instead of a raw traceback.
        aws_readme_url = "https://github.com/mesoscope/cellpack/blob/feature/main/README.md#aws-s3"
        print(
            f"need to configure your aws account, find instructions here: {aws_readme_url}"
        )
    except Exception as e:
        print(
            f"An error occurred while storing the file {simularium_file} to S3: {e}"
        )
    if url is not None:
        simulariumHelper.open_in_simularium(url)

@staticmethod
def store_results_to_s3(file_path):
    """Upload file_path to the cellpack-results bucket and return its presigned URL."""
    return AWSHandler(
        bucket_name="cellpack-results",
        sub_folder_name="simularium/",
        region_name="us-west-2",
    ).save_file(file_path)

@staticmethod
def open_in_simularium(aws_url):
    """Open the Simularium viewer in a new browser tab, pointed at aws_url."""
    viewer_url = f"https://simularium.allencell.org/viewer?trajUrl={aws_url}"
    webbrowser.open_new_tab(viewer_url)
8 changes: 7 additions & 1 deletion cellpack/autopack/writers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,13 @@ def save_as_simularium(self, env, all_ingr_as_array, compartments):
env.helper.add_grid_data_to_scene(
f"{gradient.name}-weights", grid_positions, gradient.weight
)
env.helper.writeToFile(env.result_file, env.boundingBox, env.name, env.version)
file_name = env.helper.writeToFile(
env.result_file, env.boundingBox, env.name, env.version
)
if env.config_data is None or env.config_data.get(
"upload_results", env.config_data.get("number_of_packings", 1) <= 1
):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

two things: I don't think env.config_data can ever be None, so I don't think you need that check
I think you could do 3 or fewer. I have run some multiple packings recently that I would like to see the results of.

autopack.helper.post_and_open_file(file_name)

def save_Mixed_asJson(
self,
Expand Down
3 changes: 2 additions & 1 deletion cellpack/tests/packing-configs/test_parallel_config.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,6 @@
"number_of_packings": 5,
"spacing": null,
"use_periodicity": false,
"show_sphere_trees": true
"show_sphere_trees": true,
"upload_results": false
}
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,6 @@
"spacing": null,
"parallel": false,
"use_periodicity": false,
"show_sphere_trees": true
"show_sphere_trees": true,
"upload_results": false
}
3 changes: 2 additions & 1 deletion examples/packing-configs/pcna_parallel_packing_config.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,6 @@
"show_progress_bar": true,
"spacing": 2.5,
"use_periodicity": false,
"show_sphere_trees": false
"show_sphere_trees": false,
"upload_results": false
}
3 changes: 2 additions & 1 deletion examples/packing-configs/peroxisome_packing_config.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,6 @@
"spacing": 2.5,
"use_periodicity": false,
"show_sphere_trees": false,
"load_from_grid_file": true
"load_from_grid_file": true,
"upload_results": false
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

all config settings should also have a default setting in the config loader

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
"show_sphere_trees": false,
"image_export_options": {
"hollow": false,
"voxel_size": [1,1,1],
"voxel_size": [1, 1, 1],
"projection_axis": "z"
}
},
"upload_results": false
}
3 changes: 2 additions & 1 deletion examples/packing-configs/run.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,6 @@
"use_periodicity": false,
"show_sphere_trees": false,
"load_from_grid_file": false,
"save_converted_recipe": true
"save_converted_recipe": true,
"upload_results": true
}
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
]

requirements = [
"boto3>=1.28.3",
"fire>=0.4.0",
"firebase_admin>=6.0.1",
"matplotlib>=3.3.4",
Expand Down