diff --git a/examples/00_user_setup/install_client.sh b/examples/00_user_setup/install_client.sh
index 8a8e99c..661c027 100755
--- a/examples/00_user_setup/install_client.sh
+++ b/examples/00_user_setup/install_client.sh
@@ -1,4 +1,4 @@
-VERSION="1.3.2"
+VERSION="1.3.3"
 ENVNAME=fractal-client-$VERSION
 
 conda deactivate
diff --git a/examples/01_cardio_tiny_dataset/.gitignore b/examples/01_cardio_tiny_dataset/.gitignore
index 3d09921..ff96ccb 100644
--- a/examples/01_cardio_tiny_dataset/.gitignore
+++ b/examples/01_cardio_tiny_dataset/.gitignore
@@ -3,3 +3,4 @@ output*
 task_list
 .fractal.env
 proj*
+workflow.json
diff --git a/examples/01_cardio_tiny_dataset/README.md b/examples/01_cardio_tiny_dataset/README.md
index eff6d05..96fd2d4 100755
--- a/examples/01_cardio_tiny_dataset/README.md
+++ b/examples/01_cardio_tiny_dataset/README.md
@@ -9,9 +9,19 @@ This needs to be done in each example folder you're running
 2. Get the example data: `pip install zenodo_get`, then run `. ./fetch_test_data_from_zenodo.sh`
 3. One can then either go through the project creation, dataset creation, workflow creation & submission one by one. Or run it all at once by running: `. ./run_example.sh`
 
-This should complete fairly quickly (submitting the script to it being finished took 30s on my machine). One can check the status with `fractal job show ID` (where the ID is the job ID of the submitted workflow, 1 for the first workflow submitted. This is shown when submitting the workflow)
+This should complete fairly quickly (submission to completion took about 30s on my machine). One can check the status with `fractal job show ID` (where ID is the job ID of the submitted workflow: 1 for the first workflow submitted; the ID is shown when submitting the workflow)
+
+## Running partial workflows
+You can use `run_example_with_partial_execution.sh` as an example of how to run only parts of a workflow.
+Modify its last lines if you don't want the second part of the workflow to run automatically after a short wait.
+
+
+## Import a workflow, then run it
+1. Create a project, add a workflow to it and export (download) that workflow by running `prepare_and_export_workflow.sh`
+2. Create a second project, import the workflow and apply it by running `import_and_run_workflow.sh`
+
 Check the client documentation for details on using the Fractal Client: https://fractal-analytics-platform.github.io/fractal/install.html
 Check the Fractal Tasks Core documentation for details on the individual tasks of this example workflow: https://fractal-analytics-platform.github.io/fractal-tasks-core/
 
-Successfully run with `fractal-server==1.3.0a5`, `fractal-client==1.3.0a3` and `fractal-tasks-core==0.10.0a4`
+Successfully run with `fractal-server==1.3.4`, `fractal-client==1.3.3` and `fractal-tasks-core==0.10.0`
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/import_and_run_workflow.sh b/examples/01_cardio_tiny_dataset/import_and_run_workflow.sh
similarity index 100%
rename from examples/01_cardio_tiny_dataset_with_import_export/import_and_run_workflow.sh
rename to examples/01_cardio_tiny_dataset/import_and_run_workflow.sh
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/prepare_and_export_workflow.sh b/examples/01_cardio_tiny_dataset/prepare_and_export_workflow.sh
similarity index 100%
rename from examples/01_cardio_tiny_dataset_with_import_export/prepare_and_export_workflow.sh
rename to examples/01_cardio_tiny_dataset/prepare_and_export_workflow.sh
diff --git a/examples/01_cardio_tiny_dataset/run_example_with_partial_execution.sh b/examples/01_cardio_tiny_dataset/run_example_with_partial_execution.sh
new file mode 100755
index 0000000..1ea91b2
--- /dev/null
+++ b/examples/01_cardio_tiny_dataset/run_example_with_partial_execution.sh
@@ -0,0 +1,81 @@
+LABEL="cardiac-test-partial-1"
+
+###############################################################################
+# IMPORTANT: This defines the location of input & output data
+INPUT_PATH=`pwd`/../images/10.5281_zenodo.7059515/
+OUTPUT_PATH=`pwd`/output_${LABEL}
+###############################################################################
+
+# Get the credentials: If you followed the instructions, they can be copied
+# from the .fractal.env file in ../00_user_setup. Alternatively, you can write
+# a .fractal.env file yourself or add --user & --password entries to all fractal
+# commands below
+cp ../00_user_setup/.fractal.env .fractal.env
+
+# Set useful variables
+PRJ_NAME="proj-$LABEL"
+DS_IN_NAME="input-ds-$LABEL"
+DS_OUT_NAME="output-ds-$LABEL"
+WF_NAME="Workflow $LABEL"
+
+# Set cache path and remove any previous file from there
+export FRACTAL_CACHE_PATH=`pwd`/".cache"
+rm -rv ${FRACTAL_CACHE_PATH} 2> /dev/null
+
+###############################################################################
+
+# Create project
+OUTPUT=`fractal --batch project new $PRJ_NAME`
+PRJ_ID=`echo $OUTPUT | cut -d ' ' -f1`
+DS_IN_ID=`echo $OUTPUT | cut -d ' ' -f2`
+echo "PRJ_ID: $PRJ_ID"
+echo "DS_IN_ID: $DS_IN_ID"
+
+# Update dataset name/type, and add a resource
+fractal dataset edit --new-name "$DS_IN_NAME" --new-type image --make-read-only $PRJ_ID $DS_IN_ID
+fractal dataset add-resource $PRJ_ID $DS_IN_ID $INPUT_PATH
+
+# Add output dataset, and add a resource to it
+DS_OUT_ID=`fractal --batch project add-dataset $PRJ_ID "$DS_OUT_NAME"`
+echo "DS_OUT_ID: $DS_OUT_ID"
+
+fractal dataset edit --new-type zarr --remove-read-only $PRJ_ID $DS_OUT_ID
+fractal dataset add-resource $PRJ_ID $DS_OUT_ID $OUTPUT_PATH
+
+# Create workflow
+WF_ID=`fractal --batch workflow new "$WF_NAME" $PRJ_ID`
+echo "WF_ID: $WF_ID"
+
+###############################################################################
+
+# Prepare some JSON files for task arguments (note: this has to happen here,
+# because we need to include the path of the current directory)
+CURRENT_FOLDER=`pwd`
+echo "{
+  \"level\": 0,
+  \"input_ROI_table\": \"well_ROI_table\",
+  \"workflow_file\": \"$CURRENT_FOLDER/regionprops_from_existing_labels_feature.yaml\",
+  \"input_specs\": {
+    \"dapi_img\": { \"type\": \"image\", \"channel\":{ \"wavelength_id\": \"A01_C01\" } },
+    \"label_img\": { \"type\": \"label\", \"label_name\": \"nuclei\" }
+  },
+  \"output_specs\": {
+    \"regionprops_DAPI\": { \"type\": \"dataframe\", \"table_name\": \"nuclei\" }
+  }
+}
+" > Parameters/args_measurement.json
+
+###############################################################################
+
+# Add tasks to workflow
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Create OME-Zarr structure" --args-file Parameters/args_create_ome_zarr.json --meta-file Parameters/example_meta.json
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Convert Yokogawa to OME-Zarr"
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Copy OME-Zarr structure"
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Maximum Intensity Projection"
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Cellpose Segmentation" --args-file Parameters/args_cellpose_segmentation.json #--meta-file Parameters/cellpose_meta.json
+fractal --batch workflow add-task $PRJ_ID $WF_ID --task-name "Napari workflows wrapper" --args-file Parameters/args_measurement.json --meta-file Parameters/example_meta.json
+
+# Apply workflow
+fractal workflow apply $PRJ_ID $WF_ID $DS_IN_ID $DS_OUT_ID --end 1
+sleep 90
+fractal workflow apply $PRJ_ID $WF_ID $DS_OUT_ID $DS_OUT_ID --start 2
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/.gitignore b/examples/01_cardio_tiny_dataset_with_import_export/Parameters/.gitignore
deleted file mode 100644
index 33bd906..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-args_measurement.json
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_cellpose_segmentation.json b/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_cellpose_segmentation.json
deleted file mode 100644
index b862b99..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_cellpose_segmentation.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "level": 3,
-  "channel":{
-    "wavelength_id": "A01_C01"
-  },
-  "output_label_name": "nuclei",
-  "input_ROI_table": "well_ROI_table",
-  "model_type": "nuclei",
-  "diameter_level0": 60
-}
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_create_ome_zarr.json b/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_create_ome_zarr.json
deleted file mode 100644
index 5dba6a9..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/args_create_ome_zarr.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "allowed_channels": [
-    {
-      "color": "00FFFF",
-      "wavelength_id": "A01_C01",
-      "label": "DAPI",
-      "window":{
-        "start": 110,
-        "end": 800
-      }
-    },
-    {
-      "color": "FF00FF",
-      "wavelength_id": "A01_C02",
-      "label": "nanog",
-      "window": {
-        "start": 110,
-        "end": 290
-      }
-    },
-    {
-      "color": "FFFF00",
-      "wavelength_id": "A02_C03",
-      "label": "Lamin B1",
-      "window": {
-        "start": 110,
-        "end": 1600
-      }
-    }
-  ]
-  ,
-  "coarsening_xy": 2,
-  "num_levels": 5,
-  "image_extension": "png"
-}
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/example_meta.json b/examples/01_cardio_tiny_dataset_with_import_export/Parameters/example_meta.json
deleted file mode 100644
index 7f9ea2e..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/Parameters/example_meta.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "cpus_per_task": 1,
-  "mem": 4000
-}
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/README.md b/examples/01_cardio_tiny_dataset_with_import_export/README.md
deleted file mode 100755
index e480706..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# Setting up a fractal client & run a workflow through Fractal
-
-## Client setup (from `00_user_setup` folder)
-This only needs to be done once (unless the server is restarted again). Follow the instructions in the `00_user_setup` folder.
-
-## Running an example through Fractal
-This needs to be done in each example folder you're running
-1. Switch to this example folder. If you followed the instructions above, credentials should be used automatically. Alternatively, check the top of the script to set them up manually.
-2. Get the example data (unless you already did in the other 01 example): `pip install zenodo_get`, then run `. ./fetch_test_data_from_zenodo.sh`
-3. Create a project & add a workflow to it that is downloaded by running `prepare_and_export_workflow.sh`
-4. Create a second project, import the workflow and apply it by running `import_and_run_workflow.sh`
-
-This should complete fairly quickly. One can check the status with `fractal job show ID` (where the ID is the job ID of the submitted workflow, 1 for the first workflow submitted. This is shown when submitting the workflow)
-
-Check the client documentation for details on using the Fractal Client: https://fractal-analytics-platform.github.io/fractal/install.html
-Check the Fractal Tasks Core documentation for details on the individual tasks of this example workflow: https://fractal-analytics-platform.github.io/fractal-tasks-core/
-
-Successfully run with `fractal-server==1.3.0a5`, `fractal-client==1.3.0a3` and `fractal-tasks-core==0.10.0a4`
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/fetch_test_data_from_zenodo.sh b/examples/01_cardio_tiny_dataset_with_import_export/fetch_test_data_from_zenodo.sh
deleted file mode 100755
index fcda5db..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/fetch_test_data_from_zenodo.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-DOI="10.5281/zenodo.7059515"
-CLEAN_DOI=${DOI/\//_}
-zenodo_get $DOI -o ../images/${CLEAN_DOI}
diff --git a/examples/01_cardio_tiny_dataset_with_import_export/regionprops_from_existing_labels_feature.yaml b/examples/01_cardio_tiny_dataset_with_import_export/regionprops_from_existing_labels_feature.yaml
deleted file mode 100755
index 880f0c5..0000000
--- a/examples/01_cardio_tiny_dataset_with_import_export/regionprops_from_existing_labels_feature.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-!!python/object:napari_workflows._workflow.Workflow
-_tasks:
-  regionprops_DAPI: !!python/tuple
-  - !!python/name:napari_skimage_regionprops._regionprops.regionprops_table ''
-  - dapi_img
-  - label_img
-  - true
-  - true
-  - false
-  - false
-  - false
-  - false
diff --git a/examples/server/install_script.sh b/examples/server/install_script.sh
index b22eed3..d711b5e 100644
--- a/examples/server/install_script.sh
+++ b/examples/server/install_script.sh
@@ -1,4 +1,4 @@
-VERSION="1.3.3"
+VERSION="1.3.4"
 ENVNAME=fractal-server-$VERSION
 
 conda deactivate
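
Note on run_example_with_partial_execution.sh: the fixed `sleep 90` between the two `fractal workflow apply` calls only works if the first half of the workflow finishes within 90 seconds; on a slower machine the second call would start too early. A more robust variant polls the job until it reaches a terminal state. The sketch below is illustrative only, under two assumptions to verify against your fractal-client version: that `fractal --batch workflow apply` prints the job ID, and that the output of `fractal job show` contains a terminal status string such as "done" or "failed".

# Sketch: run part 1, poll the job, then run part 2 (assumptions noted above).
JOB_ID=`fractal --batch workflow apply $PRJ_ID $WF_ID $DS_IN_ID $DS_OUT_ID --end 1`
echo "JOB_ID: $JOB_ID"
while true; do
    STATUS=`fractal job show $JOB_ID`
    # Treating "done"/"failed" as the terminal statuses is an assumption;
    # adjust the grep patterns to the actual job-status output.
    echo "$STATUS" | grep -q "done" && break
    echo "$STATUS" | grep -q "failed" && echo "Job $JOB_ID failed" && break
    sleep 10
done
fractal workflow apply $PRJ_ID $WF_ID $DS_OUT_ID $DS_OUT_ID --start 2

The loop also breaks on failure so a sourced script does not hang; in that case the second apply would run on an incomplete output dataset, so it is worth checking `fractal job show $JOB_ID` manually before continuing.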