diff --git a/.gitignore b/.gitignore
index 113985a6..23506842 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@ build/
 *.swp
 __pycache__
 node_modules
+dist
diff --git a/VERSIONLOG.md b/VERSIONLOG.md
index 55c3a719..6dc2d8d0 100644
--- a/VERSIONLOG.md
+++ b/VERSIONLOG.md
@@ -4,6 +4,10 @@
 Make ONT volume calculations script case-agnostic for concentration units.
 
+## 20240826.1
+
+Add a script for AVITI run manifest generation, re-organize the repo to follow best-practice modularization, and implement an EPP wrapper.
+
 ## 20240823.2
 
 Add function to fetch sample-level Q30 for AVITI
diff --git a/requirements.txt b/requirements.txt
index f3d74068..70438d85 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,6 +10,7 @@ oauth2client
 pandas
 protobuf
 psycopg2
+python_levenshtein
 PyYAML
 Requests
 scilifelab_parsers @ git+https://github.com/SciLifeLab/scilifelab_parsers
diff --git a/calc_from_args_utils/__init__.py b/scilifelab_epps/calc_from_args/__init__.py
similarity index 100%
rename from calc_from_args_utils/__init__.py
rename to scilifelab_epps/calc_from_args/__init__.py
diff --git a/calc_from_args_utils/calculation_methods.py b/scilifelab_epps/calc_from_args/calculation_methods.py
similarity index 99%
rename from calc_from_args_utils/calculation_methods.py
rename to scilifelab_epps/calc_from_args/calculation_methods.py
index 5cb33f28..f048a0ea 100644
--- a/calc_from_args_utils/calculation_methods.py
+++ b/scilifelab_epps/calc_from_args/calculation_methods.py
@@ -7,12 +7,12 @@
 import tabulate
 from genologics.entities import Process
 
-from calc_from_args_utils.udf_arg_methods import (
+from scilifelab_epps.calc_from_args.udf_arg_methods import (
     fetch_from_arg,
     get_UDF_source,
     get_UDF_source_name,
 )
-from epp_utils import formula, udf_tools
+from scilifelab_epps.utils import formula, udf_tools
 
 DESC = """This file contains the method functions for a UDF-agnostic script."""
diff --git a/calc_from_args_utils/udf_arg_methods.py b/scilifelab_epps/calc_from_args/udf_arg_methods.py
similarity index 98%
rename from calc_from_args_utils/udf_arg_methods.py
rename to scilifelab_epps/calc_from_args/udf_arg_methods.py
index 3f5a56f4..1040c91d 100644
--- a/calc_from_args_utils/udf_arg_methods.py
+++ b/scilifelab_epps/calc_from_args/udf_arg_methods.py
@@ -5,7 +5,7 @@
 import yaml
 from genologics.entities import Artifact, Process
 
-from epp_utils import udf_tools
+from scilifelab_epps.utils import udf_tools
 
 
 def fetch_from_arg(
diff --git a/epp_utils/__init__.py b/scilifelab_epps/utils/__init__.py
similarity index 100%
rename from epp_utils/__init__.py
rename to scilifelab_epps/utils/__init__.py
diff --git a/epp_utils/formula.py b/scilifelab_epps/utils/formula.py
similarity index 100%
rename from epp_utils/formula.py
rename to scilifelab_epps/utils/formula.py
diff --git a/epp_utils/udf_tools.py b/scilifelab_epps/utils/udf_tools.py
similarity index 100%
rename from epp_utils/udf_tools.py
rename to scilifelab_epps/utils/udf_tools.py
diff --git a/scilifelab_epps/wrapper.py b/scilifelab_epps/wrapper.py
new file mode 100644
index 00000000..5380fbee
--- /dev/null
+++ b/scilifelab_epps/wrapper.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+import logging
+import os
+import sys
+
+from genologics.config import BASEURI, PASSWORD, USERNAME
+from genologics.entities import Process
+from genologics.lims import Lims
+
+from scilifelab_epps.epp import upload_file
+
+
+def epp_decorator(script_path: str, timestamp: str):
+    """This top-level decorator is meant to be used on EPP scripts' main functions.
+
+    It receives the script path (__file__) and timestamp (yymmdd_hhmmss) as arguments to
+    pass on to its children, which wrap the main function to handle logging and graceful failure.
+    """
+    script_name: str = os.path.basename(script_path).split(".")[0]
+
+    def _epp_decorator(script_main):
+        def epp_wrapper(args):
+            """General wrapper for EPP scripts."""
+
+            # Set up LIMS
+            lims = Lims(BASEURI, USERNAME, PASSWORD)
+            lims.check_version()
+            process = Process(lims, id=args.pid)
+
+            # Name log file
+            log_filename: str = (
+                "_".join(
+                    [
+                        script_name,
+                        process.id,
+                        timestamp,
+                        process.technician.name.replace(" ", ""),
+                    ]
+                )
+                + ".log"
+            )
+
+            # Set up logging
+            logging.basicConfig(
+                filename=log_filename,
+                filemode="w",
+                format="%(levelname)s: %(message)s",
+                level=logging.INFO,
+            )
+
+            # Start logging
+            logging.info(f"Script '{script_name}' started at {timestamp}.")
+            logging.info(
+                f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
+            )
+            args_str = "\n\t".join(
+                [f"'{arg}': {getattr(args, arg)}" for arg in vars(args)]
+            )
+            logging.info(f"Script called with arguments: \n\t{args_str}")
+
+            # Run
+            try:
+                script_main(args)
+
+            # On script error
+            except Exception as e:
+                # Post error to LIMS GUI
+                logging.error(str(e), exc_info=True)
+                logging.shutdown()
+                upload_file(
+                    file_path=log_filename,
+                    file_slot=args.log,
+                    process=process,
+                    lims=lims,
+                )
+                os.remove(log_filename)
+                sys.stderr.write(str(e))
+                sys.exit(2)
+
+            # On script success
+            else:
+                logging.info("Script completed successfully.")
+                logging.shutdown()
+                upload_file(
+                    file_path=log_filename,
+                    file_slot=args.log,
+                    process=process,
+                    lims=lims,
+                )
+                # Check log for errors and warnings
+                log_content = open(log_filename).read()
+                os.remove(log_filename)
+                if "ERROR:" in log_content or "WARNING:" in log_content:
+                    sys.stderr.write(
+                        "Script finished successfully, but log contains errors or warnings, please have a look."
+                    )
+                    sys.exit(2)
+                else:
+                    sys.exit(0)
+
+        return epp_wrapper
+
+    return _epp_decorator
diff --git a/scilifelab_epps/zika/__init__.py b/scilifelab_epps/zika/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/scripts/zika_methods.py b/scilifelab_epps/zika/methods.py
similarity index 95%
rename from scripts/zika_methods.py
rename to scilifelab_epps/zika/methods.py
index a813bd47..7c596454 100644
--- a/scripts/zika_methods.py
+++ b/scilifelab_epps/zika/methods.py
@@ -10,9 +10,9 @@
 
 import numpy as np
 import pandas as pd
 
-import zika_utils
-from epp_utils.udf_tools import is_filled
+from scilifelab_epps import zika
+from scilifelab_epps.utils.udf_tools import is_filled
 
 
 def pool_fixed_vol(
@@ -57,7 +57,7 @@
         "dst_id": "art_tuple[1]['uri'].location[0].id",
         "dst_well": "art_tuple[1]['uri'].location[1]",
     }
-    df_all = zika_utils.fetch_sample_data(currentStep, to_fetch)
+    df_all = zika.utils.fetch_sample_data(currentStep, to_fetch)
 
     # Define deck, a dictionary mapping plate names to deck positions
     assert len(df_all.src_id.unique()) <= 4, "Only one to four input plates allowed"
@@ -77,22 +77,22 @@
         df_wl = pd.concat([df_wl, df_pool], axis=0)
 
     # Format worklist
-    df_formatted = zika_utils.format_worklist(df_wl.copy(), deck)
-    wl_filename, log_filename = zika_utils.get_filenames(
+    df_formatted = zika.utils.format_worklist(df_wl.copy(), deck)
+    wl_filename, log_filename = zika.utils.get_filenames(
         method_name="pool", pid=currentStep.id
     )
 
     # Write the output files
-    zika_utils.write_worklist(
+    zika.utils.write_worklist(
         df=df_formatted.copy(),
         deck=deck,
         wl_filename=wl_filename,
     )
-    zika_utils.write_log(log, log_filename)
+    zika.utils.write_log(log, log_filename)
 
     # Upload files
-    zika_utils.upload_csv(currentStep, lims, wl_filename)
-    zika_utils.upload_log(currentStep, lims, log_filename)
+    zika.utils.upload_csv(currentStep, lims, wl_filename)
+    zika.utils.upload_log(currentStep, lims, log_filename)
 
     # Issue warnings, if any
     if any("WARNING" in entry for entry in log):
@@ -187,7 +187,7 @@
         if v:
             to_fetch[k] = f"art_tuple[1]['uri'].udf['{v}']"
 
-    df_all = zika_utils.fetch_sample_data(currentStep, to_fetch)
+    df_all = zika.utils.fetch_sample_data(currentStep, to_fetch)
 
     # All samples should have accessible volume
     assert all(
@@ -321,7 +321,7 @@
                     )
                     errors = True
 
-                raise zika_utils.VolumeOverflow
+                raise zika.utils.VolumeOverflow
 
             log.append(
                 "\nAn even pool can be created within the following parameter ranges:"
@@ -409,7 +409,7 @@
                     )
                     errors = True
 
-                raise zika_utils.VolumeOverflow
+                raise zika.utils.VolumeOverflow
 
             log.append(
                 "\nWill try to create a pool that is as even as possible. Accounting for sample depletion, a pool can be created with the following parameter ranges: "
@@ -436,7 +436,7 @@
             # No volume expansion is allowed, so pool volume is set to the minimum, given the conc
             pool_vol = pool_real_min_sample_vol
 
-        except zika_utils.VolumeOverflow:
+        except zika.utils.VolumeOverflow:
             continue
 
     # === STORE FINAL CALCULATION RESULTS ===
@@ -518,14 +518,14 @@
     pool.put()
 
     # Get filenames and upload log if errors
-    wl_filename, log_filename = zika_utils.get_filenames(
+    wl_filename, log_filename = zika.utils.get_filenames(
         method_name="pool", pid=currentStep.id
     )
     if errors:
-        raise zika_utils.CheckLog(log, log_filename, lims, currentStep)
+        raise zika.utils.CheckLog(log, log_filename, lims, currentStep)
 
     # Format worklist
-    df_formatted = zika_utils.format_worklist(df_wl.copy(), deck)
+    df_formatted = zika.utils.format_worklist(df_wl.copy(), deck)
 
     # Comments to attach to the worklist header
     comments = [
@@ -539,17 +539,17 @@
     )
 
     # Write the output files
-    zika_utils.write_worklist(
+    zika.utils.write_worklist(
         df=df_formatted.copy(),
         deck=deck,
         wl_filename=wl_filename,
         comments=comments,
     )
-    zika_utils.write_log(log, log_filename)
+    zika.utils.write_log(log, log_filename)
 
     # Upload files
-    zika_utils.upload_csv(currentStep, lims, wl_filename)
-    zika_utils.upload_log(currentStep, lims, log_filename)
+    zika.utils.upload_csv(currentStep, lims, wl_filename)
+    zika.utils.upload_log(currentStep, lims, log_filename)
 
     # Issue warnings, if any
     if any("WARNING" in entry for entry in log):
@@ -656,7 +656,7 @@
         if v:
             to_fetch[k] = f"art_tuple[1]['uri'].udf['{v}']"
 
-    df = zika_utils.fetch_sample_data(currentStep, to_fetch)
+    df = zika.utils.fetch_sample_data(currentStep, to_fetch)
 
     conc_unit = "ng/ul" if use_customer_metrics else df.conc_units[0]
     amt_unit = "ng" if conc_unit == "ng/ul" else "fmol"
@@ -789,34 +789,34 @@
     wl_comments = []
 
     # Resolve buffer transfers
-    df_buffer, wl_comments = zika_utils.resolve_buffer_transfers(
+    df_buffer, wl_comments = zika.utils.resolve_buffer_transfers(
         df=df.copy(), wl_comments=wl_comments
     )
 
     # Format worklist
-    df_formatted = zika_utils.format_worklist(df_buffer.copy(), deck=deck)
+    df_formatted = zika.utils.format_worklist(df_buffer.copy(), deck=deck)
 
     wl_comments.append(
         f"This worklist will enact normalization of {len(df)} samples. For detailed parameters see the worklist log"
     )
 
     # Write files
-    wl_filename, log_filename = zika_utils.get_filenames(
+    wl_filename, log_filename = zika.utils.get_filenames(
         method_name="norm", pid=currentStep.id
     )
 
-    zika_utils.write_worklist(
+    zika.utils.write_worklist(
         df=df_formatted.copy(),
         deck=deck,
         wl_filename=wl_filename,
         comments=wl_comments,
     )
 
-    zika_utils.write_log(log, log_filename)
+    zika.utils.write_log(log, log_filename)
 
     # Upload files
-    zika_utils.upload_csv(currentStep, lims, wl_filename)
-    zika_utils.upload_log(currentStep, lims, log_filename)
+    zika.utils.upload_csv(currentStep, lims, wl_filename)
+    zika.utils.upload_log(currentStep, lims, log_filename)
 
     # Issue warnings, if any
     if any("WARNING" in entry for entry in log):
diff --git a/scripts/zika_utils.py b/scilifelab_epps/zika/utils.py
similarity index 99%
rename from scripts/zika_utils.py
rename to scilifelab_epps/zika/utils.py
index 941e8da7..26b0b528 100644
--- a/scripts/zika_utils.py
+++ b/scilifelab_epps/zika/utils.py
@@ -17,7 +17,7 @@
 import pandas as pd
 from genologics.entities import Process
 
-from epp_utils.udf_tools import fetch_last
+from scilifelab_epps.utils.udf_tools import fetch_last
 
 
 def verify_step(currentStep, targets=None):
diff --git a/scripts/__init__.py b/scripts/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/scripts/bravo_csv.py b/scripts/bravo_csv.py
index 95f3a038..29096fe3 100644
--- a/scripts/bravo_csv.py
+++ b/scripts/bravo_csv.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python
-
 
 import logging
 import os
 import re
@@ -8,12 +7,11 @@
 from argparse import ArgumentParser
 
 import pandas as pd
-import zika_methods
-import zika_utils
 from genologics.config import BASEURI, PASSWORD, USERNAME
 from genologics.entities import Process
 from genologics.lims import Lims
 
+from scilifelab_epps import zika
 from scilifelab_epps.epp import attach_file
 
 DESC = """EPP used to create csv files for the bravo robot"""
@@ -263,12 +261,12 @@
 
     if currentStep.instrument.name == "Zika":
         if currentStep.type.name == "Illumina DNA No-QC Library Pooling":
-            zika_methods.pool_fixed_vol(
+            zika.methods.pool_fixed_vol(
                 currentStep=currentStep,
                 lims=lims,
             )
         else:
-            zika_methods.pool(
+            zika.methods.pool(
                 currentStep=currentStep,
                 lims=lims,
                 udfs={
@@ -369,14 +367,14 @@ def setup_qpcr(currentStep, lims):
 
 def default_bravo(lims, currentStep, with_total_vol=True):
     # Re-route to Zika
-    if zika_utils.verify_step(
+    if zika.utils.verify_step(
         currentStep,
         targets=[
             ("SMARTer Pico RNA", "Setup Workset/Plate"),
             ("QIAseq miRNA", "Setup Workset/Plate"),
         ],
     ):
-        zika_methods.norm(
+        zika.methods.norm(
            currentStep=currentStep,
            lims=lims,
            udfs={
@@ -388,10 +386,10 @@
                 "final_conc": None,
             },
         )
-    elif zika_utils.verify_step(
+    elif zika.utils.verify_step(
         currentStep, targets=[("Amplicon", "Setup Workset/Plate")]
     ):
-        zika_methods.norm(
+        zika.methods.norm(
             currentStep=currentStep,
             lims=lims,
             # Use lower minimum pipetting volume and customer metrics
diff --git a/scripts/calc_from_args.py b/scripts/calc_from_args.py
index b8c398d4..82332b9c 100644
--- a/scripts/calc_from_args.py
+++ b/scripts/calc_from_args.py
@@ -1,7 +1,4 @@
 #!/usr/bin/env python
-import logging
-import os
-import sys
 from argparse import ArgumentParser
 from datetime import datetime as dt
 
@@ -9,8 +6,8 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from calc_from_args_utils import calculation_methods
-from scilifelab_epps.epp import upload_file
+from scilifelab_epps.calc_from_args import calculation_methods
+from scilifelab_epps.wrapper import epp_decorator
 
 DESC = """UDF-agnostic script to perform calculations across all artifacts of a step.
@@ -20,7 +17,6 @@
 """
 
 TIMESTAMP: str = dt.now().strftime("%y%m%d_%H%M%S")
-SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0]
 
 
 def parse_udf_arg(arg_string: str) -> dict:
@@ -71,7 +67,8 @@
     return arg_dict
 
 
-def main():
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args):
     f"""Set up log, LIMS instance and parse args.
 
     Example 1:
@@ -124,6 +121,15 @@
 
     """
 
+    # Set up LIMS
+    lims = Lims(BASEURI, USERNAME, PASSWORD)
+    process = Process(lims, id=args.pid)
+
+    function_to_use = getattr(calculation_methods, args.calc)
+    function_to_use(process, args)
+
+
+if __name__ == "__main__":
     # Parse args
     parser = ArgumentParser(description=DESC)
     parser.add_argument("--pid", type=str, help="Lims ID for current Process")
@@ -134,6 +140,7 @@
         help="Which function to use for calculations",
     )
     parser.add_argument("--log", type=str, help="Which log file slot to use")
+
     # UDFs to use for calculations
     udf_args = [
         "vol_in",
@@ -146,79 +153,7 @@
     ]
     for udf_arg in udf_args:
         parser.add_argument(f"--{udf_arg}", type=parse_udf_arg)
-    args = parser.parse_args()
-
-    # Set up LIMS
-    lims = Lims(BASEURI, USERNAME, PASSWORD)
-    lims.check_version()
-    process = Process(lims, id=args.pid)
-
-    # Set up logging
-    log_filename: str = (
-        "_".join(
-            [
-                SCRIPT_NAME,
-                args.calc,
-                process.id,
-                TIMESTAMP,
-                process.technician.name.replace(" ", ""),
-            ]
-        )
-        + ".log"
-    )
-
-    logging.basicConfig(
-        filename=log_filename,
-        filemode="w",
-        format="%(levelname)s: %(message)s",
-        level=logging.INFO,
-    )
-
-    # Start logging
-    logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.")
-    logging.info(
-        f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        function_to_use = getattr(calculation_methods, args.calc)
-        function_to_use(process, args)
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(str(e), exc_info=True)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-            remove=True,
-        )
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("")
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        log_content = open(log_filename).read()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-            remove=True,
-        )
-        # Check log for errors and warnings
-        if "ERROR:" in log_content or "WARNING:" in log_content:
-            sys.stderr.write(
-                "Script finished successfully, but log contains errors or warnings, please have a look."
-            )
-            sys.exit(2)
-        else:
-            sys.exit(0)
+    args = parser.parse_args()
 
-if __name__ == "__main__":
-    main()
+    main(args)
diff --git a/scripts/generate_anglerfish_samplesheet.py b/scripts/generate_anglerfish_samplesheet.py
index e7a6f9f7..3b4cc925 100644
--- a/scripts/generate_anglerfish_samplesheet.py
+++ b/scripts/generate_anglerfish_samplesheet.py
@@ -4,7 +4,6 @@
 import os
 import re
 import shutil
-import sys
 from argparse import ArgumentParser
 from datetime import datetime as dt
 
@@ -16,12 +15,12 @@
 from data.Chromium_10X_indexes import Chromium_10X_indexes
 from data.ONT_barcodes import ONT_BARCODES
 from scilifelab_epps.epp import upload_file
+from scilifelab_epps.wrapper import epp_decorator
 
 DESC = """Script to generate Anglerfish samplesheet for ONT runs.
 """
 
 TIMESTAMP = dt.now().strftime("%y%m%d_%H%M%S")
-SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0]
 
 
 def generate_anglerfish_samplesheet(process):
@@ -124,7 +123,35 @@ def get_adaptor_name(reagent_label: str) -> str | list[str]:
     )
 
 
-def main():
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args):
+    lims = Lims(BASEURI, USERNAME, PASSWORD)
+    process = Process(lims, id=args.pid)
+
+    file_name = generate_anglerfish_samplesheet(process)
+
+    logging.info("Uploading samplesheet to LIMS...")
+    upload_file(
+        file_name,
+        args.file,
+        process,
+        lims,
+    )
+
+    logging.info("Moving samplesheet to ngi-nas-ns...")
+    try:
+        shutil.copyfile(
+            file_name,
+            f"/srv/ngi-nas-ns/samplesheets/anglerfish/{dt.now().year}/{file_name}",
+        )
+        os.remove(file_name)
+    except Exception:
+        logging.error("Failed to move samplesheet to ngi-nas-ns.")
+    else:
+        logging.info("Samplesheet moved to ngi-nas-ns.")
+
+
+if __name__ == "__main__":
     # Parse args
     parser = ArgumentParser(description=DESC)
     parser.add_argument(
@@ -147,96 +174,4 @@
     )
     args = parser.parse_args()
 
-    # Set up LIMS
-    lims = Lims(BASEURI, USERNAME, PASSWORD)
-    lims.check_version()
-    process = Process(lims, id=args.pid)
-
-    # Set up logging
-    log_filename: str = (
-        "_".join(
-            [
-                SCRIPT_NAME,
-                process.id,
-                TIMESTAMP,
-                process.technician.name.replace(" ", ""),
-            ]
-        )
-        + ".log"
-    )
-
-    logging.basicConfig(
-        filename=log_filename,
-        filemode="w",
-        format="%(levelname)s: %(message)s",
-        level=logging.INFO,
-    )
-
-    # Start logging
-    logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.")
-    logging.info(
-        f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        file_name = generate_anglerfish_samplesheet(process)
-
-        logging.info("Uploading samplesheet to LIMS...")
-        upload_file(
-            file_name,
-            args.file,
-            process,
-            lims,
-        )
-
-        logging.info("Moving samplesheet to ngi-nas-ns...")
-        try:
-            shutil.copyfile(
-                file_name,
-                f"/srv/ngi-nas-ns/samplesheets/anglerfish/{dt.now().year}/{file_name}",
-            )
-            os.remove(file_name)
-        except:
-            logging.error("Failed to move samplesheet to ngi-nas-ns.")
-        else:
-            logging.info("Samplesheet moved to ngi-nas-ns.")
-
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(str(e), exc_info=True)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        os.remove(log_filename)
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("")
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        # Check log for errors and warnings
-        log_content = open(log_filename).read()
-        os.remove(log_filename)
-        if "ERROR:" in log_content or "WARNING:" in log_content:
-            sys.stderr.write(
-                "Script finished successfully, but log contains errors or warnings, please have a look."
-            )
-            sys.exit(2)
-        else:
-            sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
+    main(args)
diff --git a/scripts/generate_aviti_run_manifest.py b/scripts/generate_aviti_run_manifest.py
new file mode 100644
index 00000000..2cae17f3
--- /dev/null
+++ b/scripts/generate_aviti_run_manifest.py
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+
+import logging
+import os
+import re
+import shutil
+from argparse import ArgumentParser, Namespace
+from datetime import datetime as dt
+
+import pandas as pd
+from genologics.config import BASEURI, PASSWORD, USERNAME
+from genologics.entities import Process
+from genologics.lims import Lims
+from Levenshtein import hamming as distance
+
+from scilifelab_epps.epp import upload_file
+from scilifelab_epps.wrapper import epp_decorator
+from scripts.generate_minknow_samplesheet import get_pool_sample_label_mapping
+
+TIMESTAMP = dt.now().strftime("%y%m%d_%H%M%S")
+LABEL_SEQ_SUBSTRING = re.compile(r"[ACGT]{4,}(-[ACGT]{4,})?")
+
+
+def get_flowcell_id(process: Process) -> str:
+    flowcell_ids = [
+        op.container.name for op in process.all_outputs() if op.type == "Analyte"
+    ]
+
+    assert len(set(flowcell_ids)) == 1, "Expected one flowcell ID."
+    flowcell_id = flowcell_ids[0]
+
+    if "-" in flowcell_id:
+        logging.warning(
+            f"Container name {flowcell_id} contains a dash, did you forget to set the name of the LIMS container to the flowcell ID?"
+        )
+
+    return flowcell_id
+
+
+def get_runValues_section(process: Process, file_name: str) -> str:
+    """Generate the [RUNVALUES] section of the AVITI run manifest and return it as a string."""
+
+    read_recipe = "-".join(
+        [
+            str(process.udf.get("Read 1 Cycles", 0)),
+            str(process.udf.get("Index Read 1", 0)),
+            str(process.udf.get("Index Read 2", 0)),
+            str(process.udf.get("Read 2 Cycles", 0)),
+        ]
+    )
+
+    runValues_section = "\n".join(
+        [
+            "[RUNVALUES]",
+            "KeyName, Value",
+            f"lims_step_name, {sanitize(process.type.name)}",
+            f"file_name, {sanitize(file_name)}",
+            f"read_recipe, {read_recipe}",
+        ]
+    )
+
+    return runValues_section
+
+
+def get_settings_section() -> str:
+    """Generate the [SETTINGS] section of the AVITI run manifest and return it as a string."""
+    settings_section = "\n".join(
+        [
+            "[SETTINGS]",
+            "SettingName, Value",
+        ]
+    )
+
+    return settings_section
+
+
+def get_samples_section(process: Process) -> str:
+    """Generate the [SAMPLES] section of the AVITI run manifest and return it as a string."""
+
+    phix_loaded: bool = process.udf["PhiX Loaded"]
+
+    # Assert two output analytes placed in either flowcell lane
+    arts_out = [op for op in process.all_outputs() if op.type == "Analyte"]
+    assert len(arts_out) == 2, "Expected two output analytes."
+    lanes = [art_out.location[1].split(":")[1] for art_out in arts_out]
+    assert set(lanes) == {"1", "2"}, "Expected lanes 1 and 2."
+
+    # Iterate over pools
+    all_rows = []
+    for art_out, lane in zip(arts_out, lanes):
+        lane_rows = []
+        assert (
+            "AVITI Flow Cell" in art_out.container.type.name
+        ), f"Unsupported container type {art_out.container.type.name}."
+        assert (
+            len(art_out.samples) > 1 and len(art_out.reagent_labels) > 1
+        ), "Not a pool."
+        assert len(art_out.samples) == len(
+            art_out.reagent_labels
+        ), "Unequal number of samples and reagent labels."
+
+        sample2label: dict[str, str] = get_pool_sample_label_mapping(art_out)
+        samples = art_out.samples
+        labels = art_out.reagent_labels
+
+        assert len(set(labels)) == len(labels), "Detected non-unique reagent labels."
+
+        # Iterate over samples
+        for sample in samples:
+            lims_label = sample2label[sample.name]
+
+            # Parse sample index
+            label_seq_match = re.search(LABEL_SEQ_SUBSTRING, lims_label)
+            assert (
+                label_seq_match is not None
+            ), f"Could not parse label sequence from {lims_label}"
+            label_seq = label_seq_match.group(0)
+
+            if "-" in label_seq:
+                index1, index2 = label_seq.split("-")
+            else:
+                index1 = label_seq
+                index2 = ""
+
+            row = {}
+            row["SampleName"] = sample.name
+            row["Index1"] = index1
+            row["Index2"] = index2
+            row["Lane"] = lane
+
+            lane_rows.append(row)
+
+        # Add PhiX controls
+        if phix_loaded:
+            for phix_idx_pair in [
+                ("ACGTGTAGC", "GCTAGTGCA"),
+                ("CACATGCTG", "AGACACTGT"),
+                ("GTACACGAT", "CTCGTACAG"),
+                ("TGTGCATCA", "TAGTCGATC"),
+            ]:
+                row = {}
+                row["SampleName"] = "PhiX"
+                row["Index1"] = phix_idx_pair[0]
+                row["Index2"] = phix_idx_pair[1]
+                row["Lane"] = lane
+                lane_rows.append(row)
+
+        # Check for index collision within lane, across samples and PhiX
+        check_distances(lane_rows)
+        all_rows.extend(lane_rows)
+
+    df = pd.DataFrame(all_rows)
+
+    samples_section = f"[SAMPLES]\n{df.to_csv(index=None, header=True)}"
+
+    return samples_section
+
+
+def check_distances(rows: list[dict], dist_warning_threshold=3) -> None:
+    for i in range(len(rows)):
+        row = rows[i]
+
+        for row_comp in rows[i + 1 :]:
+            check_pair_distance(
+                row, row_comp, dist_warning_threshold=dist_warning_threshold
+            )
+
+
+def check_pair_distance(
+    row, row_comp, check_flips: bool = False, dist_warning_threshold: int = 3
+):
+    """Distance check between two index pairs.
+
+    row                     dict   manifest row of sample A
+    row_comp                dict   manifest row of sample B
+    check_flips             bool   check all reverse-complement combinations
+    dist_warning_threshold  int    trigger warning for distances at or below this value
+
+    """
+
+    if check_flips:
+        flips = []
+        for a1, _a1 in zip(
+            [row["Index1"], revcomp(row["Index1"])], ["Index1", "Index1_rc"]
+        ):
+            for a2, _a2 in zip(
+                [row["Index2"], revcomp(row["Index2"])], ["Index2", "Index2_rc"]
+            ):
+                for b1, _b1 in zip(
+                    [row_comp["Index1"], revcomp(row_comp["Index1"])],
+                    ["Index1", "Index1_rc"],
+                ):
+                    for b2, _b2 in zip(
+                        [row_comp["Index2"], revcomp(row_comp["Index2"])],
+                        ["Index2", "Index2_rc"],
+                    ):
+                        flips.append(
+                            (
+                                distance(a1, b1) + distance(a2, b2),
+                                f"{a1}-{a2} {b1}-{b2}",
+                                f"{_a1}-{_a2} {_b1}-{_b2}",
+                            )
+                        )
+        dist, compared_seqs, flip_conf = min(flips, key=lambda x: x[0])
+
+    else:
+        dist = distance(
+            row["Index1"] + row["Index2"], row_comp["Index1"] + row_comp["Index2"]
+        )
+        compared_seqs = (
+            f"{row['Index1']}-{row['Index2']} {row_comp['Index1']}-{row_comp['Index2']}"
+        )
+
+    if dist <= dist_warning_threshold:
+        # Build a warning message for the pair
+        warning_lines = [
+            f"Hamming distance {dist} between {row['SampleName']} and {row_comp['SampleName']}"
+        ]
+        # If the distance is derived from a flip, show the original and the flipped conformation
+        if check_flips:
+            warning_lines.append(
+                f"Given: {row['Index1']}-{row['Index2']} <-> {row_comp['Index1']}-{row_comp['Index2']}"
+            )
+            warning_lines.append(f"Distance: {dist} when flipped to {flip_conf}")
+        # If the index lengths are equal, add a simple visual representation
+        if len(row["Index1"]) + len(row["Index2"]) == len(row_comp["Index1"]) + len(
+            row_comp["Index2"]
+        ):
+            warning_lines.append(show_match(*compared_seqs.split()))
+
+        warning = "\n".join(warning_lines)
+        logging.warning(warning)
+
+        # For identical collisions, kill the process
+        if dist == 0:
+            raise AssertionError("Identical indices detected.")
+
+
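+# Illustrative example (hypothetical rows, for orientation only): with the
+# default dist_warning_threshold of 3, the pair below would log a warning,
+# since hamming() over the concatenated 9 bp indices sums the per-index
+# mismatches to 2:
+#
+#   row_a = {"SampleName": "A", "Index1": "ACGTGTAGC", "Index2": "GCTAGTGCA"}
+#   row_b = {"SampleName": "B", "Index1": "ACGTGTAGG", "Index2": "GCTAGTGCT"}
+#   distance("ACGTGTAGC" + "GCTAGTGCA", "ACGTGTAGG" + "GCTAGTGCT")  # == 2
+#
+# A distance of 0 (identical index pairs) raises AssertionError instead.
+
+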
+def revcomp(seq: str) -> str:
+    """Reverse-complement a DNA string."""
+    return seq.translate(str.maketrans("ACGT", "TGCA"))[::-1]
+
+
+def show_match(seq1: str, seq2: str) -> str:
+    """Visualize base-by-base match between sequences of equal length."""
+
+    assert len(seq1) == len(seq2)
+
+    m = ""
+    for seq1_base, seq2_base in zip(seq1, seq2):
+        if seq1_base == seq2_base:
+            m += "|"
+        else:
+            m += "X"
+
+    lines = "\n".join([seq1, m, seq2])
+    return lines
+
+
+def sanitize(s: str) -> str:
+    """Wrap a string in quotes if it contains commas."""
+    if "," in s:
+        return f'"{s}"'
+    else:
+        return s
+
+
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args: Namespace):
+    lims = Lims(BASEURI, USERNAME, PASSWORD)
+    process = Process(lims, id=args.pid)
+
+    # Name manifest file
+    flowcell_id = get_flowcell_id(process)
+    file_name = f"AVITI_run_manifest_{flowcell_id}_{process.id}_{TIMESTAMP}_{process.technician.name.replace(' ','')}.csv"
+
+    # Build manifest
+    logging.info("Starting to build run manifest.")
+
+    runValues_section = get_runValues_section(process, file_name)
+    settings_section = get_settings_section()
+    samples_section = get_samples_section(process)
+
+    manifest = "\n\n".join([runValues_section, settings_section, samples_section])
+
+    # Write manifest
+    with open(file_name, "w") as f:
+        f.write(manifest)
+
+    # Upload manifest
+    logging.info("Uploading run manifest to LIMS...")
+    upload_file(
+        file_name,
+        args.file,
+        process,
+        lims,
+    )
+
+    logging.info("Moving run manifest to ngi-nas-ns...")
+    try:
+        shutil.copyfile(
+            file_name,
+            f"/srv/ngi-nas-ns/samplesheets/Aviti/{dt.now().year}/{file_name}",
+        )
+        os.remove(file_name)
+    except Exception:
+        logging.error("Failed to move run manifest to ngi-nas-ns.", exc_info=True)
+    else:
+        logging.info("Run manifest moved to ngi-nas-ns.")
+
+
+if __name__ == "__main__":
+    # Parse args
+    parser = ArgumentParser()
+    parser.add_argument(
+        "--pid",
+        required=True,
+        type=str,
+        help="Lims ID for current Process.",
+    )
+    parser.add_argument(
+        "--log",
+        required=True,
+        type=str,
+        help="Which file slot to use for the script log.",
+    )
+    parser.add_argument(
+        "--file",
+        required=True,
+        type=str,
+        help="Which file slot to use for the run manifest.",
+    )
+    args = parser.parse_args()
+
+    main(args)
diff --git a/scripts/generate_minknow_samplesheet.py b/scripts/generate_minknow_samplesheet.py
index 25122426..1a957632 100644
--- a/scripts/generate_minknow_samplesheet.py
+++ b/scripts/generate_minknow_samplesheet.py
@@ -4,7 +4,6 @@
 import os
 import re
 import shutil
-import sys
 from argparse import ArgumentParser
 from datetime import datetime as dt
 
@@ -17,17 +16,14 @@
 from tabulate import tabulate
 
 from data.ONT_barcodes import ONT_BARCODE_LABEL_PATTERN, ONT_BARCODES
-from epp_utils.udf_tools import fetch
 from scilifelab_epps.epp import traceback_to_step, upload_file
+from scilifelab_epps.utils.udf_tools import fetch
+from scilifelab_epps.wrapper import epp_decorator
 
 DESC = """
 Script to generate MinKNOW samplesheet for starting ONT runs.
""" TIMESTAMP = dt.now().strftime("%y%m%d_%H%M%S") -SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0] - -with open("/opt/gls/clarity/users/glsai/config/genosqlrc.yaml") as f: - config = yaml.safe_load(f) def get_ont_library_contents( @@ -194,6 +190,9 @@ def get_ont_library_contents( def get_pool_sample_label_mapping(pool: Artifact) -> dict[str, str]: + with open("/opt/gls/clarity/users/glsai/config/genosqlrc.yaml") as f: + config = yaml.safe_load(f) + # Setup DB connection connection = psycopg2.connect( user=config["username"], @@ -298,7 +297,7 @@ def write_minknow_csv(df: pd.DataFrame, file_path: str): df_csv.to_csv(file_path, index=False) -def generate_MinKNOW_samplesheet(process: Process): +def generate_MinKNOW_samplesheet(args): """=== Sample sheet columns === flow_cell_id E.g. 'PAM96489' @@ -322,6 +321,10 @@ def generate_MinKNOW_samplesheet(process: Process): - barcode """ + + lims = Lims(BASEURI, USERNAME, PASSWORD) + process = Process(lims, id=args.pid) + qc = True if "QC" in process.type.name else False logging.info(f"QC run: {qc}") @@ -470,7 +473,35 @@ def generate_MinKNOW_samplesheet(process: Process): return file_name -def main(): +@epp_decorator(script_path=__file__, timestamp=TIMESTAMP) +def main(args): + lims = Lims(BASEURI, USERNAME, PASSWORD) + process = Process(lims, id=args.pid) + + file_name = generate_MinKNOW_samplesheet(args) + + logging.info("Uploading samplesheet to LIMS...") + upload_file( + file_name, + args.file, + process, + lims, + ) + + logging.info("Moving samplesheet to ngi-nas-ns...") + try: + shutil.copyfile( + file_name, + f"/srv/ngi-nas-ns/samplesheets/nanopore/{dt.now().year}/{file_name}", + ) + os.remove(file_name) + except: + logging.error("Failed to move samplesheet to ngi-nas-ns.", exc_info=True) + else: + logging.info("Samplesheet moved to ngi-nas-ns.") + + +if __name__ == "__main__": # Parse args parser = ArgumentParser(description=DESC) parser.add_argument( @@ -493,95 +524,4 @@ def main(): ) args = parser.parse_args() - # Set up LIMS - lims = Lims(BASEURI, USERNAME, PASSWORD) - lims.check_version() - process = Process(lims, id=args.pid) - - # Set up logging - log_filename: str = ( - "_".join( - [ - SCRIPT_NAME, - process.id, - TIMESTAMP, - process.technician.name.replace(" ", ""), - ] - ) - + ".log" - ) - - logging.basicConfig( - filename=log_filename, - filemode="w", - format="%(levelname)s: %(message)s", - level=logging.INFO, - ) - - # Start logging - logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.") - logging.info( - f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}." 
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        file_name = generate_MinKNOW_samplesheet(process=process)
-        logging.info("Uploading samplesheet to LIMS...")
-        upload_file(
-            file_name,
-            args.file,
-            process,
-            lims,
-        )
-
-        logging.info("Moving samplesheet to ngi-nas-ns...")
-        try:
-            shutil.copyfile(
-                file_name,
-                f"/srv/ngi-nas-ns/samplesheets/nanopore/{dt.now().year}/{file_name}",
-            )
-            os.remove(file_name)
-        except:
-            logging.error("Failed to move samplesheet to ngi-nas-ns.", exc_info=True)
-        else:
-            logging.info("Samplesheet moved to ngi-nas-ns.")
-
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(str(e), exc_info=True)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        os.remove(log_filename)
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("")
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        # Check log for errors and warnings
-        log_content = open(log_filename).read()
-        os.remove(log_filename)
-        if "ERROR:" in log_content or "WARNING:" in log_content:
-            sys.stderr.write(
-                "Script finished successfully, but log contains errors or warnings, please have a look."
-            )
-            sys.exit(2)
-        else:
-            sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
+    main(args)
diff --git a/scripts/log_udfs.py b/scripts/log_udfs.py
index 5d86de13..e42d6af0 100644
--- a/scripts/log_udfs.py
+++ b/scripts/log_udfs.py
@@ -12,7 +12,7 @@
 
 from ont_send_reloading_info_to_db import parse_run
 from tabulate import tabulate
 
-from epp_utils import udf_tools
+from scilifelab_epps.utils import udf_tools
 
 DESC = """Script for the EPP "Log fields" and file slot "Field log".
diff --git a/scripts/molar_concentration.py b/scripts/molar_concentration.py
index 40289d4a..57af5238 100644
--- a/scripts/molar_concentration.py
+++ b/scripts/molar_concentration.py
@@ -16,8 +16,8 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from epp_utils.formula import ng_ul_to_nM
 from scilifelab_epps.epp import EppLogger
+from scilifelab_epps.utils.formula import ng_ul_to_nM
 
 
 def apply_calculations(lims, artifacts, conc_udf, size_udf, unit_udf, epp_logger):
diff --git a/scripts/ont_calc_volumes.py b/scripts/ont_calc_volumes.py
index c442f964..9271745f 100644
--- a/scripts/ont_calc_volumes.py
+++ b/scripts/ont_calc_volumes.py
@@ -8,7 +8,7 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from epp_utils import formula, udf_tools
+from scilifelab_epps.utils import formula, udf_tools
 
 DESC = """
 EPP "ONT calculate volumes"
diff --git a/scripts/ont_pool.py b/scripts/ont_pool.py
index fd067f4d..5835d31d 100644
--- a/scripts/ont_pool.py
+++ b/scripts/ont_pool.py
@@ -9,9 +9,9 @@
 from genologics.lims import Lims
 from numpy import minimum
 from tabulate import tabulate
-from zika_utils import fetch_sample_data
 
-from epp_utils import formula
+from scilifelab_epps.utils import formula
+from scilifelab_epps.zika.utils import fetch_sample_data
 
 DESC = """
 EPP "ONT pooling", file slot "ONT pooling log".
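All of the script diffs in this change migrate to the same calling convention established by scilifelab_epps/wrapper.py: argument parsing moves under the __main__ guard and the decorated main() receives the parsed namespace. Schematically, a new-style EPP script reduces to the following minimal sketch (the step-specific body is elided; --pid and --log are the arguments the wrapper relies on):

#!/usr/bin/env python
from argparse import ArgumentParser
from datetime import datetime as dt

from scilifelab_epps.wrapper import epp_decorator

TIMESTAMP: str = dt.now().strftime("%y%m%d_%H%M%S")


@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
def main(args):
    # Step-specific logic goes here. Uncaught exceptions are logged, the log
    # file is uploaded to the slot given by --log, and the script exits 2 so
    # the failure surfaces in the LIMS GUI.
    ...


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--pid", type=str, help="Lims ID for current Process")
    parser.add_argument("--log", type=str, help="Which log file slot to use")
    args = parser.parse_args()

    main(args)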
diff --git a/scripts/ont_send_reloading_info_to_db.py b/scripts/ont_send_reloading_info_to_db.py
index 2da91b0c..de8fc6d8 100644
--- a/scripts/ont_send_reloading_info_to_db.py
+++ b/scripts/ont_send_reloading_info_to_db.py
@@ -3,7 +3,6 @@
 import logging
 import os
 import re
-import sys
 from argparse import ArgumentParser
 from datetime import datetime as dt
 
@@ -14,7 +13,7 @@
 from genologics.entities import Artifact, Process
 from genologics.lims import Lims
 
-from scilifelab_epps.epp import upload_file
+from scilifelab_epps.wrapper import epp_decorator
 
 DESC = """Used to record the washing and reloading of ONT flow cells.
 
@@ -22,7 +21,6 @@
 """
 
 TIMESTAMP: str = dt.now().strftime("%y%m%d_%H%M%S")
-SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0]
 
 
 def send_reloading_info_to_db(process: Process):
@@ -178,71 +176,19 @@
     return True
 
 
-def main():
-    # Parse args
-    parser = ArgumentParser(description=DESC)
-    parser.add_argument("--pid", help="Lims id for current Process")
-    parser.add_argument("--log", type=str, help="Which log file slot to use")
-    args = parser.parse_args()
-
-    # Set up LIMS
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args):
     lims = Lims(BASEURI, USERNAME, PASSWORD)
-    lims.check_version()
     process = Process(lims, id=args.pid)
 
-    # Set up logging
-    log_filename: str = (
-        "_".join(
-            [
-                SCRIPT_NAME,
-                process.id,
-                TIMESTAMP,
-                process.technician.name.replace(" ", ""),
-            ]
-        )
-        + ".log"
-    )
-
-    logging.basicConfig(
-        filename=log_filename,
-        filemode="w",
-        format="%(levelname)s: %(message)s",
-        level=logging.INFO,
-    )
-
-    # Start logging
-    logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.")
-    logging.info(
-        f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        send_reloading_info_to_db(process)
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(e)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.exit(0)
+    send_reloading_info_to_db(process)
 
 
 if __name__ == "__main__":
-    main()
+    # Parse args
+    parser = ArgumentParser(description=DESC)
+    parser.add_argument("--pid", help="Lims id for current Process")
+    parser.add_argument("--log", type=str, help="Which log file slot to use")
+    args = parser.parse_args()
+
+    main(args)
diff --git a/scripts/ont_sync_to_db.py b/scripts/ont_sync_to_db.py
index 2dc13bd2..9b787c83 100644
--- a/scripts/ont_sync_to_db.py
+++ b/scripts/ont_sync_to_db.py
@@ -3,7 +3,6 @@
 import logging
 import os
 import re
-import sys
 from argparse import ArgumentParser, Namespace
 from datetime import datetime as dt
 
@@ -18,8 +17,8 @@
 from genologics.lims import Lims
 from ont_send_reloading_info_to_db import get_ONT_db
 
-from epp_utils import udf_tools
-from scilifelab_epps.epp import upload_file
+from scilifelab_epps.utils import udf_tools
+from scilifelab_epps.wrapper import epp_decorator
 
 DESC = """Script for finishing the step to start ONT sequencing in LIMS.
@@ -28,7 +27,6 @@
 """
 
 TIMESTAMP: str = dt.now().strftime("%y%m%d_%H%M%S")
-SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0]
 
 
 def assert_samplesheet(process: Process, args: Namespace, lims: Lims):
@@ -241,7 +239,17 @@
     )
 
 
-def main():
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args):
+    # Set up LIMS
+    lims = Lims(BASEURI, USERNAME, PASSWORD)
+    lims.check_version()
+    process = Process(lims, id=args.pid)
+
+    sync_runs_to_db(process=process, lims=lims, args=args)
+
+
+if __name__ == "__main__":
     # Parse args
     parser = ArgumentParser(description=DESC)
     parser.add_argument(
@@ -263,64 +271,4 @@
     )
     args: Namespace = parser.parse_args()
 
-    # Set up LIMS
-    lims = Lims(BASEURI, USERNAME, PASSWORD)
-    lims.check_version()
-    process = Process(lims, id=args.pid)
-
-    # Set up logging
-    log_filename: str = (
-        "_".join(
-            [
-                SCRIPT_NAME,
-                process.id,
-                TIMESTAMP,
-                process.technician.name.replace(" ", ""),
-            ]
-        )
-        + ".log"
-    )
-
-    logging.basicConfig(
-        filename=log_filename,
-        filemode="w",
-        format="%(filename)s - %(funcName)s - %(levelname)s - %(message)s",
-        level=logging.INFO,
-    )
-
-    # Start logging
-    logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.")
-    logging.info(
-        f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        sync_runs_to_db(process=process, lims=lims, args=args)
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(e, exc_info=True)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
+    main(args)
diff --git a/scripts/ont_update_amount.py b/scripts/ont_update_amount.py
index 174e4f3c..f3c6fbfc 100644
--- a/scripts/ont_update_amount.py
+++ b/scripts/ont_update_amount.py
@@ -8,7 +8,7 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from epp_utils import formula, udf_tools
+from scilifelab_epps.utils import formula, udf_tools
 
 DESC = """
 EPP "ONT Update Amounts".
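The import churn in the remaining file diffs is purely mechanical and follows one mapping from the old top-level modules to the new package layout; a minimal before/after sketch using the exact import forms seen in ont_pool.py above:

# Before the re-organization
import zika_utils
from epp_utils import formula, udf_tools

# After the re-organization
from scilifelab_epps.utils import formula, udf_tools
from scilifelab_epps.zika.utils import fetch_sample_data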
diff --git a/scripts/parse_anglerfish_results.py b/scripts/parse_anglerfish_results.py
index 07a7d936..13797087 100644
--- a/scripts/parse_anglerfish_results.py
+++ b/scripts/parse_anglerfish_results.py
@@ -2,7 +2,6 @@
 import glob
 import logging
 import os
-import sys
 from argparse import ArgumentParser
 from datetime import datetime as dt
 
@@ -11,11 +10,10 @@
 from genologics.entities import Artifact, Process
 from genologics.lims import Lims
 
-from epp_utils import udf_tools
-from scilifelab_epps.epp import upload_file
+from scilifelab_epps.utils import udf_tools
+from scilifelab_epps.wrapper import epp_decorator
 
 TIMESTAMP: str = dt.now().strftime("%y%m%d_%H%M%S")
-SCRIPT_NAME: str = os.path.basename(__file__).split(".")[0]
 
 
 def find_run(process: Process) -> str:
@@ -197,7 +195,16 @@
     fill_udfs(process, df_parsed)
 
 
-def main():
+@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
+def main(args):
+    # Set up LIMS
+    lims = Lims(BASEURI, USERNAME, PASSWORD)
+    process = Process(lims, id=args.pid)
+
+    parse_anglerfish_results(process, lims)
+
+
+if __name__ == "__main__":
     # Parse args
     parser = ArgumentParser()
     parser.add_argument(
@@ -217,64 +224,4 @@
     )
     args = parser.parse_args()
 
-    # Set up LIMS
-    lims = Lims(BASEURI, USERNAME, PASSWORD)
-    lims.check_version()
-    process = Process(lims, id=args.pid)
-
-    # Set up logging
-    log_filename = (
-        "_".join(
-            [
-                SCRIPT_NAME,
-                process.id,
-                TIMESTAMP,
-                process.technician.name.replace(" ", ""),
-            ]
-        )
-        + ".log"
-    )
-
-    logging.basicConfig(
-        filename=log_filename,
-        filemode="w",
-        format="%(levelname)s: %(message)s",
-        level=logging.INFO,
-    )
-
-    # Start logging
-    logging.info(f"Script '{SCRIPT_NAME}' started at {TIMESTAMP}.")
-    logging.info(
-        f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
-    )
-    args_str = "\n\t".join([f"'{arg}': {getattr(args, arg)}" for arg in vars(args)])
-    logging.info(f"Script called with arguments: \n\t{args_str}")
-
-    try:
-        parse_anglerfish_results(process, lims)
-    except Exception as e:
-        # Post error to LIMS GUI
-        logging.error(e, exc_info=True)
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.stderr.write(str(e))
-        sys.exit(2)
-    else:
-        logging.info("Script completed successfully.")
-        logging.shutdown()
-        upload_file(
-            file_path=log_filename,
-            file_slot=args.log,
-            process=process,
-            lims=lims,
-        )
-        sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
+    main(args)
diff --git a/scripts/parse_ba_results.py b/scripts/parse_ba_results.py
index 9e7c7905..3f437148 100644
--- a/scripts/parse_ba_results.py
+++ b/scripts/parse_ba_results.py
@@ -11,8 +11,8 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from epp_utils import udf_tools
 from scilifelab_epps.epp import get_well_number
+from scilifelab_epps.utils import udf_tools
 
 DESC = """This script parses the Agilent BioAnalyzer XML report.
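For orientation, the manifest assembled by generate_aviti_run_manifest.py joins its three sections with blank lines; with hypothetical step name, file name, recipe and sample rows it would look roughly like the sketch below (sanitize() quotes any value containing a comma, and four PhiX rows are appended per lane when the "PhiX Loaded" UDF is set):

[RUNVALUES]
KeyName, Value
lims_step_name, Load to Flowcell (AVITI)
file_name, AVITI_run_manifest_FC123_24-123456_240826_120000_JohnDoe.csv
read_recipe, 151-8-8-151

[SETTINGS]
SettingName, Value

[SAMPLES]
SampleName,Index1,Index2,Lane
Sample_A,AAACCCGG,TTTGGGCC,1
Sample_B,CCCGGGAA,GGGTTTAA,1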
diff --git a/scripts/qc_amount_calculation.py b/scripts/qc_amount_calculation.py
index 503318a9..2e258259 100644
--- a/scripts/qc_amount_calculation.py
+++ b/scripts/qc_amount_calculation.py
@@ -16,8 +16,8 @@
 from genologics.entities import Process
 from genologics.lims import Lims
 
-from epp_utils import formula, udf_tools
 from scilifelab_epps.epp import EppLogger
+from scilifelab_epps.utils import formula, udf_tools
 
 
 def apply_calculations(artifacts, udf1, op, udf2, unit_amount_map, process):
diff --git a/setup.py b/setup.py
index 886deb80..9856d996 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,9 @@
     url="https://github.com/scilifelab/scilifelab_epps",
     license="GPLv3",
     packages=find_packages(exclude=["ez_setup", "examples", "tests"]),
-    scripts=glob.glob("scripts/*.py"),
+    scripts=[
+        file for file in glob.glob("scripts/*.py") if file != "scripts/__init__.py"
+    ],
     include_package_data=True,
     zip_safe=False,
 )
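The new scripts/__init__.py exists so that generate_aviti_run_manifest.py can do `from scripts.generate_minknow_samplesheet import get_pool_sample_label_mapping`; the setup.py filter keeps that package marker from being installed as an executable script. The string comparison assumes the forward-slash paths that glob returns on POSIX; a separator-agnostic variant (a sketch only, assuming os is imported in setup.py alongside glob) would compare basenames instead:

scripts=[
    file
    for file in glob.glob("scripts/*.py")
    if os.path.basename(file) != "__init__.py"
],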