Merge pull request #342 from kedhammar/AKe-dev
Organize modules and add EPP wrapper
kedhammar authored Aug 27, 2024
2 parents 89873cb + 20f6dbd commit a8ca9a3
Showing 30 changed files with 619 additions and 523 deletions.
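In short, the renames below fold the old top-level helper modules into the scilifelab_epps package. A minimal before/after sketch of the import changes (paths taken from the hunks that follow; not itself part of the diff):

# Old top-level modules (pre-commit):
# import zika_utils
# from epp_utils import udf_tools
# from calc_from_args_utils.udf_arg_methods import fetch_from_arg

# New package layout (post-commit):
from scilifelab_epps import zika
from scilifelab_epps.utils import udf_tools
from scilifelab_epps.calc_from_args.udf_arg_methods import fetch_from_arg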
1 change: 1 addition & 0 deletions .gitignore
@@ -8,3 +8,4 @@ build/
*.swp
__pycache__
node_modules
+dist
4 changes: 4 additions & 0 deletions VERSIONLOG.md
@@ -1,5 +1,9 @@
# Scilifelab_epps Version Log

+## 20240826.1
+
+Add script for AVITI run manifest generation, re-organize repo to follow best-practice modularization and implement EPP wrapper.
+
## 20240823.2

Add function to fetch sample-level Q30 for AVITI
1 change: 1 addition & 0 deletions requirements.txt
@@ -10,6 +10,7 @@ oauth2client
pandas
protobuf
psycopg2
+python_levenshtein
PyYAML
Requests
scilifelab_parsers @ git+https://github.com/SciLifeLab/scilifelab_parsers
File renamed without changes.
@@ -7,12 +7,12 @@
import tabulate
from genologics.entities import Process

-from calc_from_args_utils.udf_arg_methods import (
+from scilifelab_epps.calc_from_args.udf_arg_methods import (
fetch_from_arg,
get_UDF_source,
get_UDF_source_name,
)
-from epp_utils import formula, udf_tools
+from scilifelab_epps.utils import formula, udf_tools

DESC = """This file contains the method functions for a UDF-agnostic script."""

@@ -5,7 +5,7 @@
import yaml
from genologics.entities import Artifact, Process

-from epp_utils import udf_tools
+from scilifelab_epps.utils import udf_tools


def fetch_from_arg(
File renamed without changes.
File renamed without changes.
File renamed without changes.
104 changes: 104 additions & 0 deletions scilifelab_epps/wrapper.py
@@ -0,0 +1,104 @@
#!/usr/bin/env python

import logging
import os
import sys

from genologics.config import BASEURI, PASSWORD, USERNAME
from genologics.entities import Process
from genologics.lims import Lims

from scilifelab_epps.epp import upload_file


def epp_decorator(script_path: str, timestamp: str):
    """This top-level decorator is meant to be used on EPP scripts' main functions.
    It receives the script path (__file__) and timestamp (yymmdd_hhmmss) as arguments to
    pass on to its children, which wrap the main function to handle logging and graceful failure.
    """
    script_name: str = os.path.basename(script_path).split(".")[0]

    def _epp_decorator(script_main):
        def epp_wrapper(args):
            """General wrapper for EPP scripts."""

            # Set up LIMS
            lims = Lims(BASEURI, USERNAME, PASSWORD)
            lims.check_version()
            process = Process(lims, id=args.pid)

            # Name log file
            log_filename: str = (
                "_".join(
                    [
                        script_name,
                        process.id,
                        timestamp,
                        process.technician.name.replace(" ", ""),
                    ]
                )
                + ".log"
            )

            # Set up logging
            logging.basicConfig(
                filename=log_filename,
                filemode="w",
                format="%(levelname)s: %(message)s",
                level=logging.INFO,
            )

            # Start logging
            logging.info(f"Script '{script_name}' started at {timestamp}.")
            logging.info(
                f"Launched in step '{process.type.name}' ({process.id}) by {process.technician.name}."
            )
            args_str = "\n\t".join(
                [f"'{arg}': {getattr(args, arg)}" for arg in vars(args)]
            )
            logging.info(f"Script called with arguments: \n\t{args_str}")

            # Run
            try:
                script_main(args)

            # On script error
            except Exception as e:
                # Post error to LIMS GUI
                logging.error(str(e), exc_info=True)
                logging.shutdown()
                upload_file(
                    file_path=log_filename,
                    file_slot=args.log,
                    process=process,
                    lims=lims,
                )
                os.remove(log_filename)
                sys.stderr.write(str(e))
                sys.exit(2)

            # On script success
            else:
                logging.info("Script completed successfully.")
                logging.shutdown()
                upload_file(
                    file_path=log_filename,
                    file_slot=args.log,
                    process=process,
                    lims=lims,
                )
                # Check log for errors and warnings
                log_content = open(log_filename).read()
                os.remove(log_filename)
                if "ERROR:" in log_content or "WARNING:" in log_content:
                    sys.stderr.write(
                        "Script finished successfully, but log contains errors or warnings, please have a look."
                    )
                    sys.exit(2)
                else:
                    sys.exit(0)

        return epp_wrapper

    return _epp_decorator
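Not part of the diff: a hypothetical usage sketch of the new decorator, assuming an EPP script whose argparse namespace provides the pid and log attributes that epp_wrapper reads (args.pid for the step ID, args.log for the log-file slot); the flag names below are illustrative.

#!/usr/bin/env python
from argparse import ArgumentParser
from datetime import datetime as dt

from scilifelab_epps.wrapper import epp_decorator

TIMESTAMP = dt.now().strftime("%y%m%d_%H%M%S")  # yymmdd_hhmmss, as the decorator's docstring expects


@epp_decorator(script_path=__file__, timestamp=TIMESTAMP)
def main(args):
    # Step-specific logic goes here; logging setup, log upload to LIMS and
    # graceful failure are handled by the wrapper.
    pass


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--pid", type=str, help="LIMS process/step ID")  # read as args.pid
    parser.add_argument("--log", type=str, help="LIMS file slot for the log")  # read as args.log
    main(parser.parse_args())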
Empty file.
56 changes: 28 additions & 28 deletions scripts/zika_methods.py → scilifelab_epps/zika/methods.py
@@ -10,9 +10,9 @@

import numpy as np
import pandas as pd
-import zika_utils

-from epp_utils.udf_tools import is_filled
+from scilifelab_epps import zika
+from scilifelab_epps.utils.udf_tools import is_filled


def pool_fixed_vol(
@@ -57,7 +57,7 @@ def pool_fixed_vol(
"dst_id": "art_tuple[1]['uri'].location[0].id",
"dst_well": "art_tuple[1]['uri'].location[1]",
}
-df_all = zika_utils.fetch_sample_data(currentStep, to_fetch)
+df_all = zika.utils.fetch_sample_data(currentStep, to_fetch)

# Define deck, a dictionary mapping plate names to deck positions
assert len(df_all.src_id.unique()) <= 4, "Only one to four input plates allowed"
@@ -77,22 +77,22 @@
df_wl = pd.concat([df_wl, df_pool], axis=0)

# Format worklist
-df_formatted = zika_utils.format_worklist(df_wl.copy(), deck)
-wl_filename, log_filename = zika_utils.get_filenames(
+df_formatted = zika.utils.format_worklist(df_wl.copy(), deck)
+wl_filename, log_filename = zika.utils.get_filenames(
method_name="pool", pid=currentStep.id
)

# Write the output files
-zika_utils.write_worklist(
+zika.utils.write_worklist(
df=df_formatted.copy(),
deck=deck,
wl_filename=wl_filename,
)
-zika_utils.write_log(log, log_filename)
+zika.utils.write_log(log, log_filename)

# Upload files
-zika_utils.upload_csv(currentStep, lims, wl_filename)
-zika_utils.upload_log(currentStep, lims, log_filename)
+zika.utils.upload_csv(currentStep, lims, wl_filename)
+zika.utils.upload_log(currentStep, lims, log_filename)

# Issue warnings, if any
if any("WARNING" in entry for entry in log):
@@ -187,7 +187,7 @@ def pool(
if v:
to_fetch[k] = f"art_tuple[1]['uri'].udf['{v}']"

-df_all = zika_utils.fetch_sample_data(currentStep, to_fetch)
+df_all = zika.utils.fetch_sample_data(currentStep, to_fetch)

# All samples should have accessible volume
assert all(
@@ -321,7 +321,7 @@ def pool(
)

errors = True
-raise zika_utils.VolumeOverflow
+raise zika.utils.VolumeOverflow

log.append(
"\nAn even pool can be created within the following parameter ranges:"
@@ -409,7 +409,7 @@ def pool(
)

errors = True
-raise zika_utils.VolumeOverflow
+raise zika.utils.VolumeOverflow

log.append(
"\nWill try to create a pool that is as even as possible. Accounting for sample depletion, a pool can be created with the following parameter ranges: "
@@ -436,7 +436,7 @@ def pool(
# No volume expansion is allowed, so pool volume is set to the minimum, given the conc
pool_vol = pool_real_min_sample_vol

-except zika_utils.VolumeOverflow:
+except zika.utils.VolumeOverflow:
continue

# === STORE FINAL CALCULATION RESULTS ===
@@ -518,14 +518,14 @@ def pool(
pool.put()

# Get filenames and upload log if errors
-wl_filename, log_filename = zika_utils.get_filenames(
+wl_filename, log_filename = zika.utils.get_filenames(
method_name="pool", pid=currentStep.id
)
if errors:
-raise zika_utils.CheckLog(log, log_filename, lims, currentStep)
+raise zika.utils.CheckLog(log, log_filename, lims, currentStep)

# Format worklist
-df_formatted = zika_utils.format_worklist(df_wl.copy(), deck)
+df_formatted = zika.utils.format_worklist(df_wl.copy(), deck)

# Comments to attach to the worklist header
comments = [
@@ -539,17 +539,17 @@ def pool(
)

# Write the output files
-zika_utils.write_worklist(
+zika.utils.write_worklist(
df=df_formatted.copy(),
deck=deck,
wl_filename=wl_filename,
comments=comments,
)
-zika_utils.write_log(log, log_filename)
+zika.utils.write_log(log, log_filename)

# Upload files
-zika_utils.upload_csv(currentStep, lims, wl_filename)
-zika_utils.upload_log(currentStep, lims, log_filename)
+zika.utils.upload_csv(currentStep, lims, wl_filename)
+zika.utils.upload_log(currentStep, lims, log_filename)

# Issue warnings, if any
if any("WARNING" in entry for entry in log):
@@ -656,7 +656,7 @@ def norm(
if v:
to_fetch[k] = f"art_tuple[1]['uri'].udf['{v}']"

-df = zika_utils.fetch_sample_data(currentStep, to_fetch)
+df = zika.utils.fetch_sample_data(currentStep, to_fetch)

conc_unit = "ng/ul" if use_customer_metrics else df.conc_units[0]
amt_unit = "ng" if conc_unit == "ng/ul" else "fmol"
@@ -789,34 +789,34 @@ def norm(
wl_comments = []

# Resolve buffer transfers
-df_buffer, wl_comments = zika_utils.resolve_buffer_transfers(
+df_buffer, wl_comments = zika.utils.resolve_buffer_transfers(
df=df.copy(), wl_comments=wl_comments
)

# Format worklist
-df_formatted = zika_utils.format_worklist(df_buffer.copy(), deck=deck)
+df_formatted = zika.utils.format_worklist(df_buffer.copy(), deck=deck)
wl_comments.append(
f"This worklist will enact normalization of {len(df)} samples. For detailed parameters see the worklist log"
)

# Write files

-wl_filename, log_filename = zika_utils.get_filenames(
+wl_filename, log_filename = zika.utils.get_filenames(
method_name="norm", pid=currentStep.id
)

-zika_utils.write_worklist(
+zika.utils.write_worklist(
df=df_formatted.copy(),
deck=deck,
wl_filename=wl_filename,
comments=wl_comments,
)

-zika_utils.write_log(log, log_filename)
+zika.utils.write_log(log, log_filename)

# Upload files
-zika_utils.upload_csv(currentStep, lims, wl_filename)
-zika_utils.upload_log(currentStep, lims, log_filename)
+zika.utils.upload_csv(currentStep, lims, wl_filename)
+zika.utils.upload_log(currentStep, lims, log_filename)

# Issue warnings, if any
if any("WARNING" in entry for entry in log):
2 changes: 1 addition & 1 deletion scripts/zika_utils.py → scilifelab_epps/zika/utils.py
@@ -17,7 +17,7 @@
import pandas as pd
from genologics.entities import Process

-from epp_utils.udf_tools import fetch_last
+from scilifelab_epps.utils.udf_tools import fetch_last


def verify_step(currentStep, targets=None):
Empty file added scripts/__init__.py
Empty file.