diff --git a/snappy_pipeline/workflows/adapter_trimming/__init__.py b/snappy_pipeline/workflows/adapter_trimming/__init__.py index c17a43b71..49ec80f5c 100644 --- a/snappy_pipeline/workflows/adapter_trimming/__init__.py +++ b/snappy_pipeline/workflows/adapter_trimming/__init__.py @@ -237,6 +237,7 @@ def args_function(wildcards): "reads_left": {key: reads_left[key] for key in sorted(reads_left.keys())}, "reads_right": {key: reads_right[key] for key in sorted(reads_right.keys())}, }, + "config": dict(self.config.get(self.name)), } # Validate action diff --git a/snappy_pipeline/workflows/adapter_trimming/model.py b/snappy_pipeline/workflows/adapter_trimming/model.py index 50b2e3a1f..78c3df46c 100644 --- a/snappy_pipeline/workflows/adapter_trimming/model.py +++ b/snappy_pipeline/workflows/adapter_trimming/model.py @@ -96,7 +96,7 @@ class UmiLoc(Enum): class Fastp(SnappyModel): - num_threads: int = 0 + num_threads: int = 4 trim_front1: int = 0 """ trimming how many bases in front for read1, default is 0 (int [=0]) @@ -361,9 +361,9 @@ class Bbduk(SnappyModel): Field( examples=[ [ - "/fast/work/groups/cubi/projects/biotools/static_data/app_support/" + "/data/cephfs-1/work/groups/cubi/projects/biotools/static_data/app_support/" "bbtools/39.01/resources/adapters.fa", - "/fast/work/groups/cubi/projects/biotools/static_data/app_support/" + "/data/cephfs-1/work/groups/cubi/projects/biotools/static_data/app_support/" "bbtools/39.01/resources/phix174_ill.ref.fa.gz", ] ] diff --git a/snappy_pipeline/workflows/cbioportal_export/__init__.py b/snappy_pipeline/workflows/cbioportal_export/__init__.py index 292b05b30..7b76a2f4f 100644 --- a/snappy_pipeline/workflows/cbioportal_export/__init__.py +++ b/snappy_pipeline/workflows/cbioportal_export/__init__.py @@ -602,6 +602,8 @@ def get_args(self, action): # Multiple libraries should not be returned by _yield_libraries assert extraction_type not in donors[donor_name][sample_name] donors[donor_name][sample_name][extraction_type] = lib.name + assert "__config" not in donors.keys(), "__config is a reserved key, not a valid donor" + donors["__config"] = dict(self.config) return donors @dictify @@ -654,6 +656,7 @@ def get_args(self, action): for sample_name in args["cnaseq"]["samples"]: if sample_name in args["rna_seq_mrna"]["samples"]: args["3way_complete"]["samples"] += [sample_name] + args["__cancer_study_id"] = self.config.study.cancer_study_identifier return args @dictify diff --git a/snappy_pipeline/workflows/cbioportal_export/model.py b/snappy_pipeline/workflows/cbioportal_export/model.py index 8bcfe102b..73daf9307 100644 --- a/snappy_pipeline/workflows/cbioportal_export/model.py +++ b/snappy_pipeline/workflows/cbioportal_export/model.py @@ -177,6 +177,8 @@ class CbioportalExport(SnappyStepModel): datatype="NUMBER", priority="2", column="TMB", + # FIXME: the cbioportal/clinical_data wrapper mentions key named "path" + # which seems to be mandatory but is not listed here ) } ], diff --git a/snappy_pipeline/workflows/common/delly.py b/snappy_pipeline/workflows/common/delly.py index 3ea9dd757..aaa575aa1 100644 --- a/snappy_pipeline/workflows/common/delly.py +++ b/snappy_pipeline/workflows/common/delly.py @@ -53,11 +53,20 @@ def __init__(self, parent): for sheet in self.parent.shortcut_sheets: self.donor_ngs_library_to_pedigree.update(sheet.donor_ngs_library_to_pedigree) + def get_args(self, action): + # Validate action + self._validate_action(action) + return { + "genome": self.w_config.static_data_config.reference.path, + "config": 
dict(self.config.get(self.name)), + } + @dictify def _get_input_files_call(self, wildcards): ngs_mapping = self.parent.sub_workflows["ngs_mapping"] token = f"{wildcards.mapper}.{wildcards.library_name}" yield "bam", ngs_mapping(f"output/{token}/out/{token}.bam") + yield "bai", ngs_mapping(f"output/{token}/out/{token}.bam.bai") @dictify def _get_output_files_call(self): diff --git a/snappy_pipeline/workflows/common/gcnv/gcnv_run.py b/snappy_pipeline/workflows/common/gcnv/gcnv_run.py index 832b9fc7a..fdc3d8048 100644 --- a/snappy_pipeline/workflows/common/gcnv/gcnv_run.py +++ b/snappy_pipeline/workflows/common/gcnv/gcnv_run.py @@ -7,6 +7,7 @@ import warnings from glob import glob from itertools import chain +from typing import Any from snakemake.io import Wildcards, expand, touch @@ -426,6 +427,9 @@ def _get_input_files_joint_germline_cnv_segmentation(self, wildcards): name_pattern = f"write_pedigree.{wildcards.library_name}" yield "ped", f"work/{name_pattern}/out/{wildcards.library_name}.ped" + def _get_params_joint_germline_cnv_segmentation(self, wildcards: Wildcards) -> dict[str, Any]: + return {"reference": self.parent.w_config.static_data_config.reference.path} + class MergeMultikitFamiliesMixin: """Methods for merging families with multiple kits. @@ -524,6 +528,15 @@ def get_params(self, action: str): # Return requested function return getattr(self, f"_get_params_{action}") + def _get_params_preprocess_intervals(self, wildcards: Wildcards) -> dict[str, Any]: + return {"reference": self.parent.w_config.static_data_config.reference.path} + + def _get_params_coverage(self, wildcards: Wildcards) -> dict[str, Any]: + return {"reference": self.parent.w_config.static_data_config.reference.path} + + def _get_params_joint_germline_cnv_segmentation(self, wildcards: Wildcards) -> dict[str, Any]: + return {"reference": self.parent.w_config.static_data_config.reference.path} + @listify def get_result_files(self): """Return list of **concrete** paths to result files for the given configuration and sample sheets. diff --git a/snappy_pipeline/workflows/common/manta.py b/snappy_pipeline/workflows/common/manta.py index 62240e012..2a9291c5b 100644 --- a/snappy_pipeline/workflows/common/manta.py +++ b/snappy_pipeline/workflows/common/manta.py @@ -3,6 +3,8 @@ These are used in both ``sv_calling_targeted`` and ``sv_calling_wgs``. 
""" +from typing import Any + from snappy_pipeline.base import UnsupportedActionException from snappy_pipeline.utils import dictify from snappy_pipeline.workflows.abstract import BaseStepPart @@ -80,3 +82,7 @@ def _get_output_files_run(self): yield from augment_work_dir_with_output_links( work_files, self.get_log_file().values() ).items() + + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return {"reference": self.parent.w_config.static_data_config.reference.path} diff --git a/snappy_pipeline/workflows/common/melt.py b/snappy_pipeline/workflows/common/melt.py index 066e84630..da342da66 100644 --- a/snappy_pipeline/workflows/common/melt.py +++ b/snappy_pipeline/workflows/common/melt.py @@ -1,6 +1,7 @@ import re import typing from itertools import chain +from typing import Any from biomedsheets.shortcuts import is_not_background from snakemake.io import touch @@ -228,3 +229,9 @@ def _get_output_files_merge_vcf(self): @dictify def _get_log_file_merge_vcf(self): yield from self._get_log_file_with_infix("{mapper}.melt.{library_name}").items() + + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return self.config.melt.model_dump(by_alias=True) | { + "reference": self.parent.w_config.static_data_config.reference.path + } diff --git a/snappy_pipeline/workflows/gene_expression_quantification/Snakefile b/snappy_pipeline/workflows/gene_expression_quantification/Snakefile index 90046c97a..f2c7de81d 100644 --- a/snappy_pipeline/workflows/gene_expression_quantification/Snakefile +++ b/snappy_pipeline/workflows/gene_expression_quantification/Snakefile @@ -95,6 +95,8 @@ rule gene_expression_quantification_duplication_run: decision=wf.get_strandedness_file("run"), output: **wf.get_output_files("duplication", "run"), + params: + **{"args": wf.get_args("duplication", "run")}, threads: wf.get_resource("duplication", "run", "threads") resources: time=wf.get_resource("duplication", "run", "time"), @@ -113,6 +115,8 @@ rule gene_expression_quantification_dupradar_run: decision=wf.get_strandedness_file("run"), output: **wf.get_output_files("dupradar", "run"), + params: + **{"args": wf.get_args("dupradar", "run")}, threads: wf.get_resource("dupradar", "run", "threads") resources: time=wf.get_resource("dupradar", "run", "time"), @@ -131,6 +135,8 @@ rule gene_expression_quantification_rnaseqc_run: decision=wf.get_strandedness_file("run"), output: **wf.get_output_files("rnaseqc", "run"), + params: + **{"args": wf.get_args("rnaseqc", "run")}, threads: wf.get_resource("rnaseqc", "run", "threads") resources: time=wf.get_resource("rnaseqc", "run", "time"), @@ -143,12 +149,14 @@ rule gene_expression_quantification_rnaseqc_run: wf.wrapper_path("rnaqc/rnaseqc") -rule gene_expression_quantification_star_run: +rule gene_expression_quantification_stats_run: input: unpack(wf.get_input_files("stats", "run")), decision=wf.get_strandedness_file("run"), output: **wf.get_output_files("stats", "run"), + params: + **{"args": wf.get_args("stats", "run")}, threads: wf.get_resource("stats", "run", "threads") resources: time=wf.get_resource("stats", "run", "time"), diff --git a/snappy_pipeline/workflows/gene_expression_quantification/__init__.py b/snappy_pipeline/workflows/gene_expression_quantification/__init__.py index d9c4f7dba..1f353865f 100644 --- a/snappy_pipeline/workflows/gene_expression_quantification/__init__.py +++ b/snappy_pipeline/workflows/gene_expression_quantification/__init__.py @@ -48,6 +48,7 @@ """ import os +from typing import Any from 
biomedsheets.shortcuts import GenericSampleSheet, is_not_background from snakemake.io import expand @@ -188,6 +189,8 @@ def args_function(wildcards): ) if reads_right: result["input"]["reads_right"] = reads_right + result |= self.config.salmon.model_dump(by_alias=True) + result["strand"] = self.config.strand return result assert action == "run", "Unsupported actions" @@ -259,6 +262,10 @@ def get_output_files(self, action): ) ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return {"strand": self.config.strand} + @dictify def get_log_file(self, action): """Return mapping of log files.""" @@ -287,6 +294,11 @@ class FeatureCountsStepPart(GeneExpressionQuantificationStepPart): #: Class available actions actions = ("run",) + def get_args(self, action: str) -> dict[str, Any]: + return super().get_args(action) | { + "path_annotation_gtf": self.config.featurecounts.path_annotation_gtf, + } + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage @@ -373,6 +385,12 @@ class QCStepPartDupradar(GeneExpressionQuantificationStepPart): #: Class available actions actions = ("run",) + def get_args(self, action: str) -> dict[str, Any]: + return super().get_args(action) | { + "dupradar_path_annotation_gtf": self.config.dupradar.dupradar_path_annotation_gtf, + "num_threads": self.config.dupradar.num_threads, + } + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage @@ -397,6 +415,12 @@ class QCStepPartRnaseqc(GeneExpressionQuantificationStepPart): #: Class available actions actions = ("run",) + def get_args(self, action: str) -> dict[str, Any]: + return super().get_args(action) | { + "reference": self.parent.w_config.static_data_config.reference.path, + "rnaseqc_path_annotation_gtf": self.config.rnaseqc.rnaseqc_path_annotation_gtf, + } + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/helper_gcnv_model_targeted/Snakefile b/snappy_pipeline/workflows/helper_gcnv_model_targeted/Snakefile index 35631d2a6..a18c38fd3 100644 --- a/snappy_pipeline/workflows/helper_gcnv_model_targeted/Snakefile +++ b/snappy_pipeline/workflows/helper_gcnv_model_targeted/Snakefile @@ -58,7 +58,7 @@ rule build_gcnv_model_preprocess_intervals: log: wf.get_log_file("gcnv", "preprocess_intervals"), params: - step_key="helper_gcnv_model_targeted", + **{"args": wf.get_args("gcnv", "preprocess_intervals")}, wrapper: wf.wrapper_path("gcnv/preprocess_intervals") @@ -68,6 +68,8 @@ rule build_gcnv_model_annotate_gc: unpack(wf.get_input_files("gcnv", "annotate_gc")), output: **wf.get_output_files("gcnv", "annotate_gc"), + params: + **{"args": wf.get_args("gcnv", "annotate_gc")}, threads: wf.get_resource("gcnv", "annotate_gc", "threads") resources: time=wf.get_resource("gcnv", "annotate_gc", "time"), @@ -85,6 +87,8 @@ rule build_gcnv_model_coverage: unpack(wf.get_input_files("gcnv", "coverage")), output: **wf.get_output_files("gcnv", "coverage"), + params: + **{"args": wf.get_args("gcnv", "coverage")}, threads: wf.get_resource("gcnv", "coverage", "threads") resources: time=wf.get_resource("gcnv", "coverage", "time"), @@ -128,7 +132,7 @@ rule build_gcnv_model_contig_ploidy: log: wf.get_log_file("gcnv", "contig_ploidy"), params: - step_key="helper_gcnv_model_targeted", + **{"args": wf.get_args("gcnv", "contig_ploidy")}, wrapper: wf.wrapper_path("gcnv/contig_ploidy") diff --git a/snappy_pipeline/workflows/helper_gcnv_model_targeted/__init__.py 
b/snappy_pipeline/workflows/helper_gcnv_model_targeted/__init__.py index bc63074d7..b488812c0 100644 --- a/snappy_pipeline/workflows/helper_gcnv_model_targeted/__init__.py +++ b/snappy_pipeline/workflows/helper_gcnv_model_targeted/__init__.py @@ -85,6 +85,7 @@ import os import re +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet, is_not_background from snakemake.io import glob_wildcards @@ -159,6 +160,15 @@ def _get_input_files_post_germline_calls(self, wildcards, checkpoints): ) yield ext, "work/{name_pattern}/out/{name_pattern}/.done".format(name_pattern=name_pattern) + def get_args(self, action: str) -> dict[str, Any]: + gcnv_config = self.w_config.step_config["helper_gcnv_model_targeted"].gcnv + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_par_intervals": gcnv_config.path_par_intervals, + "path_target_interval_list_mapping": gcnv_config.path_target_interval_list_mapping, + "path_uniquely_mapable_bed": gcnv_config.path_uniquely_mapable_bed, + } + class HelperBuildTargetSeqGcnvModelWorkflow(BaseStep): """Perform gCNV model building for WES samples by library kit""" diff --git a/snappy_pipeline/workflows/helper_gcnv_model_wgs/Snakefile b/snappy_pipeline/workflows/helper_gcnv_model_wgs/Snakefile index ea987ae6e..3f447b42f 100644 --- a/snappy_pipeline/workflows/helper_gcnv_model_wgs/Snakefile +++ b/snappy_pipeline/workflows/helper_gcnv_model_wgs/Snakefile @@ -47,6 +47,8 @@ rule build_gcnv_model_preprocess_intervals: unpack(wf.get_input_files("gcnv", "preprocess_intervals")), output: **wf.get_output_files("gcnv", "preprocess_intervals"), + params: + **{"args": wf.get_args("gcnv", "preprocess_intervals")}, threads: wf.get_resource("gcnv", "preprocess_intervals", "threads") resources: time=wf.get_resource("gcnv", "preprocess_intervals", "time"), @@ -66,6 +68,8 @@ rule build_gcnv_model_annotate_gc: unpack(wf.get_input_files("gcnv", "annotate_gc")), output: **wf.get_output_files("gcnv", "annotate_gc"), + params: + **{"args": wf.get_args("gcnv", "annotate_gc")}, threads: wf.get_resource("gcnv", "annotate_gc", "threads") resources: time=wf.get_resource("gcnv", "annotate_gc", "time"), @@ -83,6 +87,8 @@ rule build_gcnv_model_coverage: unpack(wf.get_input_files("gcnv", "coverage")), output: **wf.get_output_files("gcnv", "coverage"), + params: + **{"args": wf.get_args("gcnv", "coverage")}, threads: wf.get_resource("gcnv", "coverage", "threads") resources: time=wf.get_resource("gcnv", "coverage", "time"), @@ -126,7 +132,7 @@ rule build_gcnv_model_contig_ploidy: log: wf.get_log_file("gcnv", "contig_ploidy"), params: - step_key="helper_gcnv_model_wgs", + **{"args": wf.get_args("gcnv", "contig_ploidy")}, wrapper: wf.wrapper_path("gcnv/contig_ploidy") diff --git a/snappy_pipeline/workflows/helper_gcnv_model_wgs/__init__.py b/snappy_pipeline/workflows/helper_gcnv_model_wgs/__init__.py index c157f92df..e3dadbd51 100644 --- a/snappy_pipeline/workflows/helper_gcnv_model_wgs/__init__.py +++ b/snappy_pipeline/workflows/helper_gcnv_model_wgs/__init__.py @@ -84,6 +84,7 @@ """ import os +from typing import Any import attr from biomedsheets.shortcuts import GermlineCaseSheet, is_not_background @@
-184,6 +185,14 @@ def _get_input_files_post_germline_calls(self, wildcards, checkpoints): ) yield ext, "work/{name_pattern}/out/{name_pattern}/.done".format(name_pattern=name_pattern) + def get_args(self, action: str) -> dict[str, Any]: + gcnv_config = self.w_config.step_config["helper_gcnv_model_wgs"].gcnv + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_par_intervals": gcnv_config.path_par_intervals, + "path_uniquely_mapable_bed": gcnv_config.path_uniquely_mapable_bed, + } + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/hla_typing/__init__.py b/snappy_pipeline/workflows/hla_typing/__init__.py index a2b8d2b04..735da0364 100644 --- a/snappy_pipeline/workflows/hla_typing/__init__.py +++ b/snappy_pipeline/workflows/hla_typing/__init__.py @@ -165,6 +165,8 @@ def args_function(wildcards): ) if reads_right: result["input"]["reads_right"] = reads_right + result["num_mapping_threads"] = self.config.optitype.num_mapping_threads + result["max_reads"] = self.config.optitype.max_reads return result assert action == "run", "Unsupported actions" diff --git a/snappy_pipeline/workflows/homologous_recombination_deficiency/Snakefile b/snappy_pipeline/workflows/homologous_recombination_deficiency/Snakefile index 8397821b0..c1c356231 100644 --- a/snappy_pipeline/workflows/homologous_recombination_deficiency/Snakefile +++ b/snappy_pipeline/workflows/homologous_recombination_deficiency/Snakefile @@ -83,6 +83,8 @@ rule homologous_recombination_deficiency_scarHRD_run: unpack(wf.get_input_files("scarHRD", "run")), output: **wf.get_output_files("scarHRD", "run"), + params: + **{"args": wf.get_args("scarHRD", "run")}, threads: wf.get_resource("scarHRD", "run", "threads") resources: time=wf.get_resource("scarHRD", "run", "time"), diff --git a/snappy_pipeline/workflows/homologous_recombination_deficiency/__init__.py b/snappy_pipeline/workflows/homologous_recombination_deficiency/__init__.py index 111c5159e..c5da32696 100644 --- a/snappy_pipeline/workflows/homologous_recombination_deficiency/__init__.py +++ b/snappy_pipeline/workflows/homologous_recombination_deficiency/__init__.py @@ -58,6 +58,7 @@ """ import sys +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, is_not_background from snakemake.io import expand @@ -124,6 +125,12 @@ def get_output_files(self, action): ) ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return self.config.scarHRD.model_dump(by_alias=True) | { + "reference": self.parent.w_config.static_data_config.reference.path, + } + @dictify def _get_log_file(self, action): """Return dict of log files.""" diff --git a/snappy_pipeline/workflows/ngs_data_qc/Snakefile b/snappy_pipeline/workflows/ngs_data_qc/Snakefile index 53ebb9335..5d9df7cce 100644 --- a/snappy_pipeline/workflows/ngs_data_qc/Snakefile +++ b/snappy_pipeline/workflows/ngs_data_qc/Snakefile @@ -95,6 +95,8 @@ rule data_qc_fastqc_run: rule data_qc_picard_prepare: output: **wf.get_output_files("picard", "prepare"), + params: + args=wf.get_params("picard", "prepare"), threads: wf.get_resource("picard", "prepare", "threads") resources: time=wf.get_resource("picard", "prepare", "time"), @@ -113,7 +115,7 @@ rule data_qc_picard_metrics: output: **wf.get_output_files("picard", "metrics"), params: - wf.get_params("picard", "metrics"), + args=wf.get_params("picard", "metrics"), threads: wf.get_resource("picard", "metrics", "threads") resources:
time=wf.get_resource("picard", "metrics", "time"), diff --git a/snappy_pipeline/workflows/ngs_data_qc/__init__.py b/snappy_pipeline/workflows/ngs_data_qc/__init__.py index 9adc2dc50..e28e89fd6 100644 --- a/snappy_pipeline/workflows/ngs_data_qc/__init__.py +++ b/snappy_pipeline/workflows/ngs_data_qc/__init__.py @@ -13,9 +13,10 @@ import os from itertools import chain +from typing import Any from biomedsheets.shortcuts import GenericSampleSheet -from snakemake.io import Namedlist, expand, touch +from snakemake.io import Namedlist, expand, touch, Wildcards from snappy_pipeline.base import UnsupportedActionException from snappy_pipeline.utils import dictify, listify @@ -215,11 +216,31 @@ def get_log_file(self, action): def get_params(self, action): self._validate_action(action) - return self._get_params - - @dictify - def _get_params(self, wildcards): - return {"prefix": f"{wildcards.mapper}.{wildcards.library_name}"} + return getattr(self, f"_get_params_{action}") + + def _get_params_prepare(self, wildcards: Wildcards) -> dict[str, Any]: + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_to_baits": self.config.picard.path_to_baits, + "path_to_targets": self.config.picard.path_to_targets, + } + + def _get_params_metrics(self, wildcards: Wildcards) -> dict[str, Any]: + params = { + "reference": self.parent.w_config.static_data_config.reference.path, + "prefix": f"{wildcards.mapper}.{wildcards.library_name}", + "programs": self.config.picard.programs, + } + if self.config.picard.bait_name: + params["bait_name"] = self.config.picard.bait_name + if ( + getattr(self.parent.w_config.static_data_config, "dbsnp", {"path": ""}) + and getattr(self.parent.w_config.static_data_config.dbsnp, "path", "") + and self.parent.w_config.static_data_config.dbsnp.path + ): + params["dbsnp"] = self.parent.w_config.static_data_config.dbsnp.path + else: + params["dbsnp"] = "" def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/ngs_mapping/Snakefile b/snappy_pipeline/workflows/ngs_mapping/Snakefile index 138983cab..8517af715 100644 --- a/snappy_pipeline/workflows/ngs_mapping/Snakefile +++ b/snappy_pipeline/workflows/ngs_mapping/Snakefile @@ -214,6 +214,8 @@ rule ngs_mapping_bam_collect_doc_run: **wf.get_input_files("bam_collect_doc", "run")(), output: **wf.get_output_files("bam_collect_doc", "run"), + params: + **{"args": wf.get_args("bam_collect_doc", "run")}, threads: wf.get_resource("bam_collect_doc", "run", "threads") resources: time=wf.get_resource("bam_collect_doc", "run", "time"), @@ -234,6 +236,8 @@ rule ngs_mapping_ngs_chew_fingerprint: **wf.get_input_files("ngs_chew", "fingerprint")(), output: **wf.get_output_files("ngs_chew", "fingerprint"), + params: + **{"args": wf.get_args("ngs_chew", "fingerprint")}, threads: wf.get_resource("ngs_chew", "fingerprint", "threads") resources: time=wf.get_resource("ngs_chew", "fingerprint", "time"), @@ -254,6 +258,8 @@ rule ngs_mapping_infer_strandedness: **wf.get_input_files("strandedness", "infer"), output: **wf.get_output_files("strandedness", "infer"), + params: + **{"args": wf.get_args("strandedness", "infer")}, threads: wf.get_resource("strandedness", "infer", "threads") resources: time=wf.get_resource("strandedness", "infer", "time"), diff --git a/snappy_pipeline/workflows/ngs_mapping/__init__.py b/snappy_pipeline/workflows/ngs_mapping/__init__.py index 501a38e9c..c583a887f 100644 --- a/snappy_pipeline/workflows/ngs_mapping/__init__.py +++ 
b/snappy_pipeline/workflows/ngs_mapping/__init__.py @@ -431,9 +431,10 @@ import re import sys from itertools import chain +from typing import Any from biomedsheets.shortcuts import GenericSampleSheet, is_not_background -from snakemake.io import expand +from snakemake.io import expand, Wildcards from snappy_pipeline.base import InvalidConfiguration, UnsupportedActionException from snappy_pipeline.utils import dictify, flatten, listify @@ -732,6 +733,14 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{mem_mb}M", ) + def get_args(self, action): + def args_fn(wildcards): + parent_args = super().get_args(action)(wildcards) + parent_args.update(dict(self.config.bwa)) + return parent_args + + return args_fn + class BwaMem2StepPart(ReadMappingStepPart): """Support for performing NGS alignment using BWA-MEM 2""" @@ -757,6 +766,14 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{mem_mb}M", ) + def get_args(self, action): + def args_fn(wildcards): + parent_args = super().get_args(action)(wildcards) + parent_args.update(dict(self.config.bwa_mem2)) + return parent_args + + return args_fn + class MBCsStepPart(ReadMappingStepPart): """Support for performing NGS alignment on MBC data""" @@ -782,6 +799,28 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: partition="medium", ) + def get_args(self, action: str): + self._validate_action(action) + + def args_fn(wildcards: Wildcards) -> dict[str, Any]: + args = super().get_args(action)(wildcards) + args |= { + "reference": self.parent.w_config.static_data_config.reference.path, + "config": self.config.mbcs.model_dump(by_alias=True), + "mapper_config": getattr(self.config, self.config.mbcs.mapping_tool).model_dump( + by_alias=True + ), + } + if self.config.mbcs.use_barcodes: + args["barcode_config"] = getattr( + self.config, self.config.mbcs.barcode_tool + ).model_dump(by_alias=True) + if self.config.mbcs.recalibrate: + args["bqsr_config"] = self.config.bqsr.model_dump(by_alias=True) + return args + + return args_fn + class StarStepPart(ReadMappingStepPart): """Support for performing NGS alignment using STAR""" @@ -851,6 +890,15 @@ def get_output_files(self, action): ), ) + def get_args(self, action: str): + def args_fn(wildcards: Wildcards) -> dict[str, Any]: + parent_args = super().get_args(action)(wildcards) + parent_args.update(self.config.star.model_dump(by_alias=True)) + parent_args["features"] = self.parent.w_config.static_data_config.features.path + return parent_args + + return args_fn + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage @@ -979,6 +1027,10 @@ def get_log_file(self, action): yield key, prefix + ext yield key + "_md5", prefix + ext + ".md5" + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return self.config.strandedness.model_dump(by_alias=True) + class Minimap2StepPart(ReadMappingStepPart): """Support for performing long-read alignment using minimap2""" @@ -1166,7 +1218,12 @@ def _get_params_run(self, wildcards): path_targets_bed = item.path break + path_reference = self.w_config.static_data_config.reference.path + path_reference_genome = path_reference + ".genome" + return { + "path_reference": path_reference, + "path_reference_genome": path_reference_genome, "path_targets_bed": path_targets_bed, } @@ -1240,6 +1297,13 @@ def get_output_files(self, action): ], ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference":
self.parent.w_config.static_data_config.reference.path, + "window_length": self.config.bam_collect_doc.window_length, + } + @dictify def _get_output_files_run_work(self): yield "vcf", "work/{mapper}.{library_name}/report/cov/{mapper}.{library_name}.cov.vcf.gz" @@ -1371,6 +1435,10 @@ def _get_log_files_fingerprint(self): yield key, prefix + ext yield key + "_md5", prefix + ext + ".md5" + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return {"reference": self.parent.w_config.static_data_config.reference.path} + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/ngs_mapping/model.py b/snappy_pipeline/workflows/ngs_mapping/model.py index d390b0528..8fd83059c 100644 --- a/snappy_pipeline/workflows/ngs_mapping/model.py +++ b/snappy_pipeline/workflows/ngs_mapping/model.py @@ -149,17 +149,6 @@ class BarcodeTool(Enum): AGENT = "agent" -class Somatic(SnappyModel): - mapping_tool: DnaMapper - """Either bwa of bwa_mem2. The indices & other parameters are taken from mapper config""" - - barcode_tool: BarcodeTool = BarcodeTool.AGENT - """Only agent currently implemented""" - - use_barcodes: bool = False - recalibrate: bool = True - - class Bqsr(SnappyModel): common_variants: str """Common germline variants (see /fast/work/groups/cubi/projects/biotools/static_data/app_support/GATK)""" @@ -277,9 +266,13 @@ class Minimap2(SnappyModel): class Mbcs(SnappyModel): mapping_tool: DnaMapper - barcode_tool: BarcodeTool - use_barcodes: bool - recalibrate: bool + """Either bwa or bwa_mem2. The indices & other parameters are taken from mapper config""" + + barcode_tool: BarcodeTool = BarcodeTool.AGENT + """Only agent currently implemented""" + + use_barcodes: bool = False + recalibrate: bool = True class NgsMapping(SnappyStepModel): @@ -304,12 +297,6 @@ class NgsMapping(SnappyStepModel): bwa_mem2: BwaMem2 | None = None """Configuration for BWA-MEM2""" - mbcs: Mbcs | None = None - """ - Configuration for somatic ngs_calling - (separate read groups, molecular barcodes & base quality recalibration) - """ - bqsr: Bqsr | None = None agent: Agent | None = None @@ -322,6 +309,10 @@ class NgsMapping(SnappyStepModel): minimap2: Minimap2 | None = None mbcs: Mbcs | None = None + """ + Configuration for somatic ngs_calling + (separate read groups, molecular barcodes & base quality recalibration) + """ @model_validator(mode="after") def ensure_tools_are_configured(self): diff --git a/snappy_pipeline/workflows/repeat_expansion/__init__.py b/snappy_pipeline/workflows/repeat_expansion/__init__.py index 82ab9da72..62a9d4c86 100644 --- a/snappy_pipeline/workflows/repeat_expansion/__init__.py +++ b/snappy_pipeline/workflows/repeat_expansion/__init__.py @@ -193,7 +193,6 @@ def get_log_file(self, action): raise UnsupportedActionException(error_message) return getattr(self, "_get_log_files_{}".format(action))() - @listify def _get_input_files_run(self, wildcards): """Yield BAM files based on subworkflow `ngs_mapping` results.
@@ -201,8 +200,14 @@ def _get_input_files_run(self, wildcards): :type wildcards: snakemake.io.Wildcards """ ngs_mapping = self.parent.sub_workflows["ngs_mapping"] - bam_tpl = "output/{mapper}.{library_name}/out/{mapper}.{library_name}.bam" - yield ngs_mapping(bam_tpl.format(**wildcards)) + bam_tpl = ngs_mapping("output/{mapper}.{library_name}/out/{mapper}.{library_name}.bam") + bam = bam_tpl.format(**wildcards) + return { + "bam": bam, + "bai": bam + ".bai", + "reference": self.w_config.static_data_config.reference.path, + "repeat_catalog": self.config.repeat_catalog, + } @staticmethod @listify diff --git a/snappy_pipeline/workflows/somatic_cnv_checking/Snakefile b/snappy_pipeline/workflows/somatic_cnv_checking/Snakefile index 2f6e8b7fe..47a6d8d8b 100644 --- a/snappy_pipeline/workflows/somatic_cnv_checking/Snakefile +++ b/snappy_pipeline/workflows/somatic_cnv_checking/Snakefile @@ -58,6 +58,8 @@ rule somatic_cnv_checking_pileup_normal: unpack(wf.get_input_files("pileup", "normal")), output: **wf.get_output_files("pileup", "normal"), + params: + args=wf.get_params("pileup", "normal"), threads: wf.get_resource("pileup", "normal", "threads") resources: time=wf.get_resource("pileup", "normal", "time"), @@ -78,6 +80,8 @@ rule somatic_cnv_checking_pileup_tumor: unpack(wf.get_input_files("pileup", "tumor")), output: **wf.get_output_files("pileup", "tumor"), + params: + args=wf.get_params("pileup", "tumor"), threads: wf.get_resource("pileup", "tumor", "threads") resources: time=wf.get_resource("pileup", "tumor", "time"), @@ -99,6 +103,8 @@ if wf.w_config.step_config["somatic_cnv_checking"].path_cnv_calling: unpack(wf.get_input_files("cnv", "run")), output: **wf.get_output_files("cnv", "run"), + params: + args=wf.get_args("cnv", "run"), threads: wf.get_resource("cnv", "run", "threads") resources: time=wf.get_resource("cnv", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_cnv_checking/__init__.py b/snappy_pipeline/workflows/somatic_cnv_checking/__init__.py index febda8197..41b72fb36 100644 --- a/snappy_pipeline/workflows/somatic_cnv_checking/__init__.py +++ b/snappy_pipeline/workflows/somatic_cnv_checking/__init__.py @@ -59,6 +59,7 @@ import os import sys +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background from snakemake.io import expand @@ -161,6 +162,17 @@ def get_output_files(self, action): ) return dict(zip(EXT_NAMES, expand(base_path_out, action=action, ext=EXT_VALUES))) + def get_args(self, **kwargs): + def args_fn(_wildcards): + return { + "reference_path": self.w_config.static_data_config.reference.path, + "min_baf": self.config.min_baf, + "min_depth": self.config.min_depth, + "max_depth": self.config.max_depth, + } + + return args_fn + def get_log_file(self, action): # Validate action self._validate_action(action) @@ -220,6 +232,11 @@ def get_output_files(self, action): yield (key, base_path_out + ext) yield (key + "_md5", base_path_out + ext + ".md5") + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + return self.config.model_dump(by_alias=True) + def get_log_file(self, action): # Validate action self._validate_action(action) @@ -258,6 +275,11 @@ def get_output_files(self, action): "segment_md5": base_path_out + ".segment.pdf.md5", } + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + return {"reference": self.parent.w_config.static_data_config.reference.path} + def get_log_file(self, action): # 
Validate action self._validate_action(action) diff --git a/snappy_pipeline/workflows/somatic_gene_fusion_calling/__init__.py b/snappy_pipeline/workflows/somatic_gene_fusion_calling/__init__.py index 4c729d389..a532f28f0 100644 --- a/snappy_pipeline/workflows/somatic_gene_fusion_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_gene_fusion_calling/__init__.py @@ -231,9 +231,16 @@ def flatten(lst): def args_function(wildcards): # TODO: wildcards.library_name is tumor_library_name - left = list(sorted(self._collect_reads(wildcards, wildcards.library_name, ""))) - right = list(sorted(self._collect_reads(wildcards, wildcards.library_name, "right-"))) - return {"left": left, "right": right} + return { + "left": list(sorted(self._collect_reads(wildcards, wildcards.library_name, ""))), + "right": list( + sorted(self._collect_reads(wildcards, wildcards.library_name, "right-")) + ), + "kallisto_index": self.config.pizzly.kallisto_index, + "kmer_size": self.config.pizzly.kmer_size, + "transcripts_fasta": self.config.pizzly.transcripts_fasta, + "annotation_gtf": self.config.pizzly.annotation_gtf, + } assert action == "run", "Unsupported actions" return args_function @@ -271,7 +278,11 @@ def args_function(wildcards): # TODO: wildcards.library_name is tumor_library_name left = list(sorted(self._collect_reads(wildcards, wildcards.library_name, ""))) right = list(sorted(self._collect_reads(wildcards, wildcards.library_name, "right-"))) - return {"left": left, "right": right} + return { + "left": left, + "right": right, + "path_ctat_resource_lib": self.config.star_fusion.path_ctat_resource_lib, + } assert action == "run", "Unsupported actions" return args_function @@ -309,7 +320,11 @@ def args_function(wildcards): # TODO: wildcards.library_name is tumor_library_name left = list(sorted(self._collect_reads(wildcards, wildcards.library_name, ""))) right = list(sorted(self._collect_reads(wildcards, wildcards.library_name, "right-"))) - return {"left": left, "right": right} + return { + "left": left, + "right": right, + "path_dataset_directory": self.config.get(self.name).get("path_dataset_directory"), + } assert action == "run", "Unsupported actions" return args_function @@ -378,14 +393,26 @@ class ArribaStepPart(SomaticGeneFusionCallingStepPart): def get_args(self, action): """Return function that maps wildcards to dict for input files""" - def flatten(lst): - return [x for pair in lst for x in pair] - def args_function(wildcards): # TODO: wildcards.library_name is tumor_library_name left = list(sorted(self._collect_reads(wildcards, wildcards.library_name, ""))) right = list(sorted(self._collect_reads(wildcards, wildcards.library_name, "right-"))) - return {"input": {"reads_left": left, "reads_right": right}} + + return { + "input": {"reads_left": left, "reads_right": right}, + "trim_adapters": self.config.arriba.trim_adapters, + "num_threads_trimming": self.config.arriba.num_threads_trimming, + "num_threads": self.config.arriba.num_threads, + "path_index": self.config.arriba.path_index, + "star_parameters": self.config.arriba.star_parameters, + "reference_path": self.w_config.static_data_config.reference.path, + "features_path": self.w_config.static_data_config.features.path, + "blacklist": self.config.arriba.blacklist, + "known_fusions": self.config.arriba.known_fusions, + "tags": self.config.arriba.tags, + "structural_variants": self.config.arriba.structural_variants, + "protein_domains": self.config.arriba.protein_domains, + } assert action == "run", "Unsupported actions" return args_function 
diff --git a/snappy_pipeline/workflows/somatic_msi_calling/Snakefile b/snappy_pipeline/workflows/somatic_msi_calling/Snakefile index 94eda4c85..1a972834c 100644 --- a/snappy_pipeline/workflows/somatic_msi_calling/Snakefile +++ b/snappy_pipeline/workflows/somatic_msi_calling/Snakefile @@ -59,6 +59,8 @@ rule somatic_msi_calling_mantis_msi2: unpack(wf.get_input_files("mantis_msi2", "run")), output: **wf.get_output_files("mantis_msi2", "run"), + params: + **{"args": wf.get_args("mantis_msi2", "run")}, threads: wf.get_resource("mantis_msi2", "run", "threads") resources: time=wf.get_resource("mantis_msi2", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_msi_calling/__init__.py b/snappy_pipeline/workflows/somatic_msi_calling/__init__.py index 8b6c0aec5..1b14f4b0f 100644 --- a/snappy_pipeline/workflows/somatic_msi_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_msi_calling/__init__.py @@ -51,6 +51,7 @@ import os import sys from collections import OrderedDict +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background from snakemake.io import expand @@ -150,6 +151,14 @@ def get_output_files(self, action): zip(EXT_NAMES, expand(self.base_path_out, msi_caller=[self.name], ext=EXT_VALUES)) ) + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "loci_bed": self.config.loci_bed, + } + @dictify def _get_log_file(self, action): """Return dict of log files.""" diff --git a/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/Snakefile b/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/Snakefile index 69b6290bf..b908f18d5 100644 --- a/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/Snakefile +++ b/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/Snakefile @@ -73,6 +73,8 @@ rule somatic_purity_ploidy_estimate_ascat_baf_tumor: unpack(wf.get_input_files("ascat", "baf_tumor")), output: **wf.get_output_files("ascat", "baf_tumor"), + params: + args=wf.get_params("ascat", "baf_tumor"), threads: wf.get_resource("ascat", "baf_tumor", "threads") resources: time=wf.get_resource("ascat", "baf_tumor", "time"), @@ -90,6 +92,8 @@ rule somatic_purity_ploidy_estimate_ascat_baf_normal: unpack(wf.get_input_files("ascat", "baf_normal")), output: **wf.get_output_files("ascat", "baf_normal"), + params: + args=wf.get_params("ascat", "baf_normal"), threads: wf.get_resource("ascat", "baf_normal", "threads") resources: time=wf.get_resource("ascat", "baf_normal", "time"), @@ -110,6 +114,8 @@ rule somatic_purity_ploidy_estimate_ascat_cnv_tumor: unpack(wf.get_input_files("ascat", "cnv_tumor")), output: **wf.get_output_files("ascat", "cnv_tumor"), + params: + args=wf.get_params("ascat", "cnv_tumor"), threads: wf.get_resource("ascat", "cnv_tumor", "threads") resources: time=wf.get_resource("ascat", "cnv_tumor", "time"), @@ -127,6 +133,8 @@ rule somatic_purity_ploidy_estimate_ascat_cnv_normal: unpack(wf.get_input_files("ascat", "cnv_normal")), output: **wf.get_output_files("ascat", "cnv_normal"), + params: + args=wf.get_params("ascat", "cnv_normal"), threads: wf.get_resource("ascat", "cnv_normal", "threads") resources: time=wf.get_resource("ascat", "cnv_normal", "time"), @@ -147,6 +155,8 @@ rule somatic_purity_ploidy_estimate_ascat_cnv_tumor_wes: unpack(wf.get_input_files("ascat", "cnv_tumor_wes")), output: **wf.get_output_files("ascat", "cnv_tumor"), + params: + args=wf.get_params("ascat", 
"cnv_tumor"), threads: wf.get_resource("ascat", "cnv_tumor", "threads") resources: time=wf.get_resource("ascat", "cnv_tumor", "time"), @@ -164,6 +174,8 @@ rule somatic_purity_ploidy_estimate_ascat_cnv_normal_wes: unpack(wf.get_input_files("ascat", "cnv_normal_wes")), output: **wf.get_output_files("ascat", "cnv_normal"), + params: + args=wf.get_params("ascat", "cnv_normal"), threads: wf.get_resource("ascat", "cnv_normal", "threads") resources: time=wf.get_resource("ascat", "cnv_normal", "time"), diff --git a/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/__init__.py b/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/__init__.py index 1c1b09ce4..a813cfcec 100644 --- a/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/__init__.py +++ b/snappy_pipeline/workflows/somatic_purity_ploidy_estimate/__init__.py @@ -233,6 +233,18 @@ def _get_output_files_run_ascat(self): ) % infix yield infix, path + def get_args(self, action): + def args_function(_wildcards): + if action in {"cnv_tumor", "cnv_normal", "baf_tumor", "baf_normal"}: + return { + "b_af_loci": self.config.ascat.b_af_loci, + "reference_path": self.w_config.static_data_config.reference.path, + } + else: + pass + + return args_function + def get_log_file(self, action): """Return path to log file""" # TODO: implement log option for actions `cnv_tumor_wes` and `cnv_normal_wes`. diff --git a/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/Snakefile b/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/Snakefile index 569ecd7df..bcfa9db82 100644 --- a/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/Snakefile +++ b/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/Snakefile @@ -130,7 +130,7 @@ rule somatic_targeted_seq_cnv_calling_cnvetti_on_target_coverage: partition=wf.get_resource("cnvetti_on_target", "coverage", "partition"), tmpdir=wf.get_resource("cnvetti_on_target", "coverage", "tmpdir"), params: - method_name="cnvetti_on_target", + **{"args": wf.get_args("cnvetti_on_target", "coverage")}, log: **wf.get_log_file("cnvetti_on_target", "coverage"), wrapper: @@ -190,7 +190,7 @@ rule somatic_targeted_seq_cnv_calling_cnvetti_off_target_coverage: partition=wf.get_resource("cnvetti_off_target", "coverage", "partition"), tmpdir=wf.get_resource("cnvetti_off_target", "coverage", "tmpdir"), params: - method_name="cnvetti_off_target", + **{"args": wf.get_args("cnvetti_off_target", "coverage")}, log: **wf.get_log_file("cnvetti_off_target", "coverage"), wrapper: @@ -261,6 +261,8 @@ rule somatic_targeted_seq_cnv_calling_sequenza_install: rule somatic_targeted_seq_cnv_calling_sequenza_gcreference: output: **wf.get_output_files("sequenza", "gcreference"), + params: + **{"args": wf.get_args("sequenza", "gcreference")}, threads: wf.get_resource("sequenza", "gcreference", "threads") resources: time=wf.get_resource("sequenza", "gcreference", "time"), @@ -279,7 +281,7 @@ rule somatic_targeted_seq_cnv_calling_sequenza_coverage: output: **wf.get_output_files("sequenza", "coverage"), params: - sample_id=wf.get_params("sequenza", "coverage"), + **{"args": wf.get_args("sequenza", "coverage")}, threads: wf.get_resource("sequenza", "coverage", "threads") resources: time=wf.get_resource("sequenza", "coverage", "time"), @@ -297,6 +299,8 @@ rule somatic_targeted_seq_cnv_calling_sequenza_run: unpack(wf.get_input_files("sequenza", "run")), output: **wf.get_output_files("sequenza", "run"), + params: + **{"args": wf.get_args("sequenza", "run")}, threads: wf.get_resource("sequenza", "run", "threads") resources: 
time=wf.get_resource("sequenza", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/__init__.py b/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/__init__.py index 4b394f243..a48e6076a 100644 --- a/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_targeted_seq_cnv_calling/__init__.py @@ -70,9 +70,10 @@ import sys from collections import OrderedDict from itertools import chain +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background -from snakemake.io import expand +from snakemake.io import expand, Wildcards from snappy_pipeline.base import UnsupportedActionException from snappy_pipeline.utils import dictify, listify @@ -270,6 +271,16 @@ def _get_output_files_postprocess(self): os.path.join("work", name_pattern, "out", name_pattern + "_" + infix + ext), ) + def get_args(self, action: str) -> dict[str, Any]: + """Return wrapper parameters for the given action""" + # Validate action + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_target_region": getattr(self.config, "path_target_regions"), + "method": self.name, + } + def _get_log_file(self, action): """Return path to log file for the given action""" # Validate action @@ -301,6 +312,11 @@ class CnvettiOffTargetStepPart(CnvettiStepPartBase): #: Step name name = "cnvetti_off_target" + def get_args(self, action: str) -> dict[str, Any]: + params = super().get_args(action) + params["window_length"] = self.config.cnvetti_off_target.window_length + return params + class CnvettiOnTargetStepPart(CnvettiStepPartBase): """Perform somatic targeted CNV calling using CNVetti with on-target reads.""" @@ -440,10 +456,41 @@ def get_output_files(self, action): def get_params(self, action): self._validate_action(action) - return self._get_params_report + return getattr(self, f"_get_params_{action}") + + def _get_params_coverage(self, wildcards: Wildcards) -> dict[str, Any]: + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "length": self.config.sequenza.length, + "ignore_chroms": self.config.sequenza.ignore_chroms, + "extra_arguments": self.config.sequenza.extra_args, + } - def _get_params_report(self, wildcards): - return wildcards["library_name"] + def _get_params_gcreference(self, wildcards: Wildcards) -> dict[str, Any]: + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "length": self.config.sequenza.length, + } + + def _get_params_report(self, wildcards: Wildcards) -> dict[str, Any]: + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "assembly": self.config.sequenza.assembly, + "ignore_chroms": self.config.sequenza.ignore_chroms, + "extra_args_extract": self.config.sequenza.extra_args_extract.model_dump(by_alias=True), + "extra_args_fit": self.config.sequenza.extra_args_fit.model_dump(by_alias=True), + "library_name": wildcards.library_name, + } + + def _get_params_run(self, wildcards: Wildcards) -> dict[str, Any]: + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "assembly": self.config.sequenza.assembly, + "ignore_chroms": self.config.sequenza.ignore_chroms, + "extra_args_extract": self.config.sequenza.extra_args_extract.model_dump(by_alias=True), + "extra_args_fit": self.config.sequenza.extra_args_fit.model_dump(by_alias=True), + "library_name": wildcards.library_name, + } def 
get_log_file(self, action): """Return dict of log files.""" diff --git a/snappy_pipeline/workflows/somatic_variant_annotation/__init__.py b/snappy_pipeline/workflows/somatic_variant_annotation/__init__.py index 53ba6514d..fe60e8d58 100644 --- a/snappy_pipeline/workflows/somatic_variant_annotation/__init__.py +++ b/snappy_pipeline/workflows/somatic_variant_annotation/__init__.py @@ -188,6 +188,8 @@ def params_function(wildcards): return { "tumor_library": wildcards.tumor_library, "normal_library": self.get_normal_lib_name(wildcards), + "config": getattr(self.config, self.name).model_dump(by_alias=True), + "reference": self.parent.w_config.static_data_config.reference.path, } else: return {} diff --git a/snappy_pipeline/workflows/somatic_variant_calling/Snakefile b/snappy_pipeline/workflows/somatic_variant_calling/Snakefile index 2f782163b..1a4ef2655 100644 --- a/snappy_pipeline/workflows/somatic_variant_calling/Snakefile +++ b/snappy_pipeline/workflows/somatic_variant_calling/Snakefile @@ -184,6 +184,7 @@ rule somatic_variant_calling_scalpel_run: **wf.get_log_file("scalpel", "run"), params: normal_lib_name=wf.substep_getattr("scalpel", "get_normal_lib_name"), + args=wf.get_args("scalpel", "run"), wrapper: wf.wrapper_path("scalpel/somatic") @@ -196,6 +197,8 @@ rule somatic_variant_calling_strelka2_run: unpack(wf.get_input_files("strelka2", "run")), output: **wf.get_output_files("strelka2", "run"), + params: + args=wf.get_args("strelka2", "run"), threads: wf.get_resource("strelka2", "run", "threads") resources: time=wf.get_resource("strelka2", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_variant_calling/__init__.py b/snappy_pipeline/workflows/somatic_variant_calling/__init__.py index 19d399da3..fb241ea62 100644 --- a/snappy_pipeline/workflows/somatic_variant_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_variant_calling/__init__.py @@ -101,6 +101,7 @@ import os import sys from collections import OrderedDict +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background from snakemake.io import expand @@ -668,6 +669,13 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{5 * 1024 * 16}M", ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_target_regions": self.config.scalpel.path_target_regions, + } + class Strelka2StepPart(SomaticVariantCallingStepPart): """Somatic variant calling with strelka2/manta""" @@ -722,6 +730,13 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory="4G", ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "path_target_regions": self.config.strelka2.path_target_regions, + } + class JointCallingStepPart(BaseStepPart): """Base class for joint calling.""" @@ -798,6 +813,7 @@ def get_args(self, action): self._validate_action(action) def arg_function(wildcards): + reference_path = self.w_config.static_data_config.reference.path donor = self.donor_by_name[wildcards.donor_name] result = { "sample_list": [ @@ -805,7 +821,8 @@ def arg_function(wildcards): for bio_sample in donor.bio_samples.values() for test_sample in bio_sample.test_samples.values() for ngs_library in test_sample.ngs_libraries.values() - ] + ], + "reference_path": reference_path, } if ignore_chroms := 
self.parent.config.ignore_chroms: result["ignore_chroms"] = ignore_chroms @@ -841,6 +858,18 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{mem_mb}M", ) + def get_args(self, action): + def args_fn(_wildcards): + parent_args = super().get_args(action)(_wildcards) + args = { + name: getattr(self.config, name) + for name in ["max_depth", "max_indel_depth", "window_length", "num_threads"] + } + args.update(parent_args) + return args + + return args_fn + class VarscanJointStepPart(JointCallingStepPart): """Somatic variant calling with Varscan in "joint" mode.""" @@ -895,6 +924,17 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{mem_mb}M", ) + def get_args(self, action): + def args_fn(_wildcards): + parent_args = super().get_args(action)(_wildcards) + args = { + name: getattr(self.config, name) for name in ["num_threads", "split_complex_mnvs"] + } + args.update(parent_args) + return args + + return args_fn + class GatkHcJointStepPart(JointCallingStepPart): """Somatic variant calling with GATK HC in "joint" mode. diff --git a/snappy_pipeline/workflows/somatic_variant_filtration/Snakefile b/snappy_pipeline/workflows/somatic_variant_filtration/Snakefile index f8be70fa1..8f411b328 100644 --- a/snappy_pipeline/workflows/somatic_variant_filtration/Snakefile +++ b/snappy_pipeline/workflows/somatic_variant_filtration/Snakefile @@ -145,6 +145,8 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "s unpack(wf.get_input_files("filter_to_exons", "run")), output: **wf.get_output_files("filter_to_exons", "run"), + params: + args=wf.get_args("filter_to_exons", "run"), threads: wf.get_resource("filter_to_exons", "run", "threads") resources: time=wf.get_resource("filter_to_exons", "run", "time"), @@ -168,7 +170,7 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "l output: **wf.get_output_files("one_dkfz", "run"), params: - **{"args": wf.get_params("one_dkfz", "run")}, + **{"args": wf.get_args("one_dkfz", "run")}, threads: wf.get_resource("one_dkfz", "run", "threads") resources: time=wf.get_resource("one_dkfz", "run", "time"), @@ -186,7 +188,7 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "l output: **wf.get_output_files("one_ebfilter", "run"), params: - **{"args": wf.get_params("one_ebfilter", "run")}, + **{"args": wf.get_args("one_ebfilter", "run")}, threads: wf.get_resource("one_ebfilter", "run", "threads") resources: time=wf.get_resource("one_ebfilter", "run", "time"), @@ -204,7 +206,7 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "l output: **wf.get_output_files("one_bcftools", "run"), params: - **{"args": wf.get_params("one_bcftools", "run")}, + **{"args": wf.get_args("one_bcftools", "run")}, threads: wf.get_resource("one_bcftools", "run", "threads") resources: time=wf.get_resource("one_bcftools", "run", "time"), @@ -222,7 +224,7 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "l output: **wf.get_output_files("one_regions", "run"), params: - **{"args": wf.get_params("one_regions", "run")}, + **{"args": wf.get_args("one_regions", "run")}, threads: wf.get_resource("one_regions", "run", "threads") resources: time=wf.get_resource("one_regions", "run", "time"), @@ -240,7 +242,7 @@ if wf.w_config.step_config["somatic_variant_filtration"].filtration_schema == "l output: **wf.get_output_files("one_protected", "run"), params: - **{"args": wf.get_params("one_protected", "run")}, + 
**{"args": wf.get_args("one_protected", "run")}, threads: wf.get_resource("one_protected", "run", "threads") resources: time=wf.get_resource("one_protected", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_variant_filtration/__init__.py b/snappy_pipeline/workflows/somatic_variant_filtration/__init__.py index 992a0b0a5..f86110a0c 100644 --- a/snappy_pipeline/workflows/somatic_variant_filtration/__init__.py +++ b/snappy_pipeline/workflows/somatic_variant_filtration/__init__.py @@ -115,9 +115,10 @@ import random import sys from collections import OrderedDict +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background -from snakemake.io import expand +from snakemake.io import expand, Wildcards from snappy_pipeline.utils import dictify, listify from snappy_pipeline.workflows.abstract import ( @@ -294,14 +295,17 @@ def get_log_file(self, action): ), ) - def get_params(self, action): + def get_args(self, action): # Validate action self._validate_action(action) - def input_function(wildcards): - return {"filter_name": "{}_{}".format(self.filter_name, wildcards["filter_nb"])} + return self._get_args - return input_function + def _get_args(self, wildcards: Wildcards) -> dict[str, Any]: + filter_nb = int(wildcards["filter_nb"]) + params = dict(self.config.filter_list[filter_nb - 1][self.filter_name]) + params["filter_name"] = "{}_{}".format(self.filter_name, wildcards["filter_nb"]) + return params class OneFilterWithBamStepPart(OneFilterStepPart): @@ -345,6 +349,12 @@ class OneFilterDkfzStepPart(OneFilterWithBamStepPart): filter_name = "dkfz" resource_usage = {"run": ResourceUsage(threads=1, time="12:00:00", memory=f"{3 * 1024}M")} + def _get_args(self, wildcards: Wildcards) -> dict[str, Any]: + """Return dkfz parameters to parameters""" + return super(OneFilterDkfzStepPart, self)._get_args(wildcards) | { + "reference": self.w_config.static_data_config.reference.path + } + class OneFilterEbfilterStepPart(OneFilterWithBamStepPart): name = "one_ebfilter" @@ -375,83 +385,28 @@ def _get_output_files_write_panel(self): ), ) - def get_params(self, action): - """Return add EBFilter parameters to parameters""" - # Validate action - self._validate_action(action) - - @dictify - def input_function(wildcards): - parent = super(OneFilterEbfilterStepPart, self).get_params(action) - parameters = parent(wildcards) - filter_nb = int(wildcards["filter_nb"]) - ebfilter_config = self.config.filter_list[filter_nb - 1][self.filter_name] - parameters.update(ebfilter_config) - parameters["has_annotation"] = self.config.has_annotation - return parameters - - return input_function + def _get_args(self, wildcards: Wildcards) -> dict[str, Any]: + """Return dkfz parameters to parameters""" + return super(OneFilterEbfilterStepPart, self)._get_args(wildcards) | { + "reference": self.w_config.static_data_config.reference.path, + "has_annotation": self.config.has_annotation, + } class OneFilterBcftoolsStepPart(OneFilterStepPart): name = "one_bcftools" filter_name = "bcftools" - def get_params(self, action): - # Validate action - self._validate_action(action) - - def input_function(wildcards): - parent = super(OneFilterBcftoolsStepPart, self).get_params(action) - parameters = parent(wildcards) - filter_nb = int(wildcards["filter_nb"]) - filter = self.config.filter_list[filter_nb - 1][self.filter_name] - keywords = filter.keywords() - parameters.update(keywords) - return parameters - - return input_function - class OneFilterRegionsStepPart(OneFilterStepPart): 
name = "one_regions" filter_name = "regions" - def get_params(self, action): - # Validate action - self._validate_action(action) - - def input_function(wildcards): - parent = super(OneFilterRegionsStepPart, self).get_params(action) - parameters = parent(wildcards) - filter_nb = int(wildcards["filter_nb"]) - filter = self.config.filter_list[filter_nb - 1][self.filter_name] - keywords = filter.keywords() - parameters.update(keywords) - return parameters - - return input_function - class OneFilterProtectedStepPart(OneFilterStepPart): name = "one_protected" filter_name = "protected" - def get_params(self, action): - # Validate action - self._validate_action(action) - - def input_function(wildcards): - parent = super(OneFilterProtectedStepPart, self).get_params(action) - parameters = parent(wildcards) - filter_nb = int(wildcards["filter_nb"]) - filter = self.config.filter_list[filter_nb - 1][self.filter_name] - keywords = filter.keywords() - parameters.update(keywords) - return parameters - - return input_function - class LastFilterStepPart(SomaticVariantFiltrationStepPart): """Mark last filter as final output""" @@ -608,6 +563,10 @@ def _get_log_file(self, action): for key, ext in key_ext: yield key, prefix + ext + def get_params(self, action): + self._validate_action(action) + return {"reference": self.w_config.static_data_config.reference.path} + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage @@ -681,9 +640,12 @@ def get_params(self, action): """Return EBFilter parameters from the config""" # Validate action self._validate_action(action) - parameters = self.config.eb_filter - parameters.update(self.config.filter_sets.dkfz_and_ebfilter) + parameters = dict(self.config.eb_filter) + for _, cfg in self.config.filter_sets: + if cfg is not None: + parameters.update(dict(cfg)) parameters["has_annotation"] = self.config.has_annotation + parameters["reference"] = self.w_config.static_data_config.reference.path return parameters @dictify @@ -826,6 +788,7 @@ def args_function(wildcards): result = { "normal_sample": self.get_normal_lib_name(wildcards), "tumor_sample": wildcards.tumor_library, + "config": self.config.filter_sets.model_dump(by_alias=True), } return result @@ -923,6 +886,11 @@ def get_output_files(self, action): for key, ext in zip(EXT_NAMES, EXT_VALUES): yield key, self.base_path_out.replace("{ext}", ext) + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + return {"exon_lists": self.config.exon_list.model_dump(by_alias=True)} + def get_log_file(self, action): # Validate action self._validate_action(action) diff --git a/snappy_pipeline/workflows/somatic_variant_filtration/model.py b/snappy_pipeline/workflows/somatic_variant_filtration/model.py index 0f48ab35d..f6817da8e 100644 --- a/snappy_pipeline/workflows/somatic_variant_filtration/model.py +++ b/snappy_pipeline/workflows/somatic_variant_filtration/model.py @@ -12,12 +12,13 @@ class DkfzAndEbfilter(SnappyModel): class DkfzAndEbfilterAndOxog(SnappyModel): vaf_threshold: float = 0.08 - coverage_threshold: float = 5 + coverage_threshold: int = 5 + ebfilter_threshold: float = 2.4 class DkfzAndOxog(SnappyModel): vaf_threshold: float = 0.08 - coverage_threshold: float = 5 + coverage_threshold: int = 5 class FilterSets(SnappyModel): diff --git a/snappy_pipeline/workflows/somatic_wgs_cnv_calling/Snakefile b/snappy_pipeline/workflows/somatic_wgs_cnv_calling/Snakefile index 2be8d1f1f..ada998418 100644 --- 
a/snappy_pipeline/workflows/somatic_wgs_cnv_calling/Snakefile +++ b/snappy_pipeline/workflows/somatic_wgs_cnv_calling/Snakefile @@ -61,6 +61,8 @@ rule somatic_wgs_cnv_calling_canvas_run: unpack(wf.get_input_files("canvas", "run")), output: **wf.get_output_files("canvas", "run"), + params: + args=wf.get_params("canvas", "run"), threads: wf.get_resource("canvas", "run", "threads") resources: time=wf.get_resource("canvas", "run", "time"), @@ -84,6 +86,8 @@ rule somatic_wgs_cnv_calling_cnvetti_coverage: unpack(wf.get_input_files("cnvetti", "coverage")), output: **wf.get_output_files("cnvetti", "coverage"), + params: + **{"args": wf.get_args("cnvetti", "coverage")}, threads: wf.get_resource("cnvetti", "coverage", "threads") resources: time=wf.get_resource("cnvetti", "coverage", "time"), @@ -121,6 +125,8 @@ rule somatic_wgs_cnv_calling_cnvetti_segment: unpack(wf.get_input_files("cnvetti", "segment")), output: **wf.get_output_files("cnvetti", "segment"), + params: + **{"args": wf.get_args("cnvetti", "segment")}, threads: wf.get_resource("cnvetti", "segment", "threads") resources: time=wf.get_resource("cnvetti", "segment", "time"), @@ -141,6 +147,8 @@ rule somatic_wgs_cnv_calling_control_freec_run: unpack(wf.get_input_files("control_freec", "run")), output: **wf.get_output_files("control_freec", "run"), + params: + **{"args": wf.get_args("control_freec", "run")}, threads: wf.get_resource("control_freec", "run", "threads") resources: time=wf.get_resource("control_freec", "run", "time"), @@ -161,6 +169,8 @@ rule somatic_wgs_cnv_calling_control_freec_transform_output: **(wf.get_output_files("control_freec", "run")), output: **wf.get_output_files("control_freec", "transform"), + params: + **{"args": wf.get_args("control_freec", "transform")}, threads: wf.get_resource("control_freec", "transform", "threads") resources: time=wf.get_resource("control_freec", "transform", "time"), diff --git a/snappy_pipeline/workflows/somatic_wgs_cnv_calling/__init__.py b/snappy_pipeline/workflows/somatic_wgs_cnv_calling/__init__.py index 28e843401..cdf51f868 100644 --- a/snappy_pipeline/workflows/somatic_wgs_cnv_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_wgs_cnv_calling/__init__.py @@ -77,6 +77,7 @@ import sys from collections import OrderedDict from itertools import chain +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background from snakemake.io import expand @@ -225,6 +226,14 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{int(3.75 * 1024 * 16)}M", ) + def get_params(self, action): + self._validate_action(action) + + def args_fn(_wildcards): + return dict(self.config.canvas.path_reference) + + return args_fn + class CnvettiSomaticWgsStepPart(SomaticWgsCnvCallingStepPart): """Somatic WGS CNV calling with CNVetti""" @@ -243,6 +252,12 @@ class CnvettiSomaticWgsStepPart(SomaticWgsCnvCallingStepPart): "bcf_csi_md5": ".bcf.csi.md5", } + #: Parameters to pass to the wrapper + params_for_action = { + "coverage": ("window_length", "count_kind", "normalization"), + "segment": ("segmentation",), + } + def get_input_files(self, action): """Return input function for CNVetti rule""" # Validate action @@ -328,6 +343,29 @@ def _get_output_files_segment(self): ), ) + def get_args(self, action: str) -> dict[str, Any]: + """Return args (params) that CNVetti creates for the given action""" + # Validate action + self._validate_action(action) + + params = {} + if action in self.params_for_action: + cfg = getattr(self.config, 
self.name) + assert cfg.preset in cfg.presets, f"Undefined preset '{cfg.preset}'" + for k in self.params_for_action[action]: + v = getattr(cfg, k, None) + if v is None: + assert k in cfg.presets[cfg.preset], ( + f"Missing parameter '{k}' from preset '{cfg.preset}'" + ) + v = cfg.presets[cfg.preset].get(k) + params[k] = v + + if action == "coverage": + params["reference"] = self.parent.w_config.static_data_config.reference.path + + return params + @dictify def get_log_file(self, action): """Return path to log file""" @@ -687,6 +725,26 @@ def get_output_files(self, action): return result + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + cfg = self.config.control_freec + if action == "run": + return { + "path_chrlenfile": cfg.path_chrlenfile, + "path_mappability": cfg.path_mappability, + "path_mappability_enabled": cfg.path_mappability_enabled, + "window_size": cfg.window_size, + } + elif action == "transform": + return { + "org_obj": cfg.convert.org_obj, + "tx_obj": cfg.convert.tx_obj, + "bs_obj": cfg.convert.bs_obj, + } + elif action == "plot": + return {} + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/somatic_wgs_sv_calling/Snakefile b/snappy_pipeline/workflows/somatic_wgs_sv_calling/Snakefile index e0517e098..9dc7aaf28 100644 --- a/snappy_pipeline/workflows/somatic_wgs_sv_calling/Snakefile +++ b/snappy_pipeline/workflows/somatic_wgs_sv_calling/Snakefile @@ -59,6 +59,8 @@ rule somatic_wgs_sv_calling_manta_run: unpack(wf.get_input_files("manta", "run")), output: **wf.get_output_files("manta", "run"), + params: + **{"args": wf.get_args("manta", "run")}, threads: wf.get_resource("manta", "run", "threads") resources: time=wf.get_resource("manta", "run", "time"), diff --git a/snappy_pipeline/workflows/somatic_wgs_sv_calling/__init__.py b/snappy_pipeline/workflows/somatic_wgs_sv_calling/__init__.py index b7c75cc30..2792387ed 100644 --- a/snappy_pipeline/workflows/somatic_wgs_sv_calling/__init__.py +++ b/snappy_pipeline/workflows/somatic_wgs_sv_calling/__init__.py @@ -76,6 +76,7 @@ import os import sys from collections import OrderedDict +from typing import Any from biomedsheets.shortcuts import CancerCaseSheet, CancerCaseSheetOptions, is_not_background from snakemake.io import expand @@ -205,6 +206,10 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{int(3.75 * 1024 * 16)}M", ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return {"reference": self.parent.w_config.static_data_config.reference.path} + class Delly2StepPart(BaseStepPart): """Somatic WGS SV identification using Delly2""" diff --git a/snappy_pipeline/workflows/sv_calling_targeted/Snakefile b/snappy_pipeline/workflows/sv_calling_targeted/Snakefile index 179e134e7..c9f20fd22 100644 --- a/snappy_pipeline/workflows/sv_calling_targeted/Snakefile +++ b/snappy_pipeline/workflows/sv_calling_targeted/Snakefile @@ -54,6 +54,8 @@ rule sv_calling_targeted_gcnv_preprocess_intervals: unpack(wf.get_input_files("gcnv", "preprocess_intervals")), output: **wf.get_output_files("gcnv", "preprocess_intervals"), + params: + args=wf.get_params("gcnv", "preprocess_intervals"), threads: wf.get_resource("gcnv", "preprocess_intervals", "threads") resources: time=wf.get_resource("gcnv", "preprocess_intervals", "time"), @@ -71,6 +73,8 @@ rule sv_calling_targeted_gcnv_coverage: unpack(wf.get_input_files("gcnv",
"coverage")), output: **wf.get_output_files("gcnv", "coverage"), + params: + args=wf.get_params("gcnv", "coverage"), threads: wf.get_resource("gcnv", "coverage", "threads") resources: time=wf.get_resource("gcnv", "coverage", "time"), @@ -146,6 +150,8 @@ rule sv_calling_targeted_gcnv_joint_germline_cnv_segmentation: unpack(wf.get_input_files("gcnv", "joint_germline_cnv_segmentation")), output: **wf.get_output_files("gcnv", "joint_germline_cnv_segmentation"), + params: + args=wf.get_params("gcnv", "coverage"), threads: wf.get_resource("gcnv", "joint_germline_cnv_segmentation", "threads") resources: time=wf.get_resource("gcnv", "joint_germline_cnv_segmentation", "time"), @@ -183,6 +189,8 @@ rule sv_calling_targeted_manta_run: unpack(wf.get_input_files("manta", "run")), output: **wf.get_output_files("manta", "run"), + params: + **{"args": wf.get_args("manta", "run")}, threads: wf.get_resource("manta", "run", "threads") resources: time=wf.get_resource("manta", "run", "time"), @@ -299,6 +307,7 @@ rule sv_calling_targeted_melt_preprocess: **wf.get_log_file("melt", "preprocess"), params: step_key="sv_calling_targeted", + args=wf.get_args("melt", "preprocess"), wrapper: wf.wrapper_path("melt/preprocess") @@ -321,6 +330,7 @@ rule sv_calling_targeted_melt_indiv_analysis: **wf.get_log_file("melt", "indiv_analysis"), params: step_key="sv_calling_targeted", + args=wf.get_args("melt", "indiv_analysis"), wrapper: wf.wrapper_path("melt/indiv_analysis") @@ -343,6 +353,7 @@ rule sv_calling_targeted_melt_group_analysis: **wf.get_log_file("melt", "group_analysis"), params: step_key="sv_calling_targeted", + args=wf.get_args("melt", "group_analysis"), wrapper: wf.wrapper_path("melt/group_analysis") @@ -365,6 +376,7 @@ rule sv_calling_targeted_melt_genotype: **wf.get_log_file("melt", "genotype"), params: step_key="sv_calling_targeted", + args=wf.get_args("melt", "genotype"), wrapper: wf.wrapper_path("melt/genotype") @@ -386,6 +398,7 @@ rule sv_calling_targeted_melt_make_vcf: **wf.get_log_file("melt", "make_vcf"), params: step_key="sv_calling_targeted", + args=wf.get_args("melt", "make_vcf"), wrapper: wf.wrapper_path("melt/make_vcf") diff --git a/snappy_pipeline/workflows/sv_calling_wgs/Snakefile b/snappy_pipeline/workflows/sv_calling_wgs/Snakefile index a0a70e04a..5641ebf21 100644 --- a/snappy_pipeline/workflows/sv_calling_wgs/Snakefile +++ b/snappy_pipeline/workflows/sv_calling_wgs/Snakefile @@ -65,6 +65,8 @@ rule sv_calling_wgs_manta_run: unpack(wf.get_input_files("manta", "run")), output: **wf.get_output_files("manta", "run"), + params: + **{"args": wf.get_args("manta", "run")}, threads: wf.get_resource("manta", "run", "threads") resources: time=wf.get_resource("manta", "run", "time"), @@ -92,7 +94,7 @@ rule sv_calling_wgs_delly2_call: partition=wf.get_resource("delly2", "call", "partition"), tmpdir=wf.get_resource("delly2", "call", "tmpdir"), params: - step_key="sv_calling_wgs", + **wf.get_args("delly2", "call"), log: **wf.get_log_file("delly2", "call"), wrapper: @@ -111,7 +113,7 @@ rule sv_calling_wgs_delly2_merge_calls: partition=wf.get_resource("delly2", "merge_calls", "partition"), tmpdir=wf.get_resource("delly2", "merge_calls", "tmpdir"), params: - step_key="sv_calling_wgs", + **wf.get_args("delly2", "merge_calls"), log: **wf.get_log_file("delly2", "merge_calls"), wrapper: @@ -130,7 +132,7 @@ rule sv_calling_wgs_delly2_genotype: partition=wf.get_resource("delly2", "genotype", "partition"), tmpdir=wf.get_resource("delly2", "genotype", "tmpdir"), params: - step_key="sv_calling_wgs", + 
**wf.get_args("delly2", "genotype"), log: **wf.get_log_file("delly2", "genotype"), wrapper: @@ -149,7 +151,7 @@ rule sv_calling_wgs_delly2_merge_genotypes: partition=wf.get_resource("delly2", "merge_genotypes", "partition"), tmpdir=wf.get_resource("delly2", "merge_genotypes", "tmpdir"), params: - step_key="sv_calling_wgs", + **wf.get_args("delly2", "merge_genotypes"), log: **wf.get_log_file("delly2", "merge_genotypes"), wrapper: @@ -224,6 +226,8 @@ rule sv_calling_wgs_popdel_profile: wildcard_constraints: mapper=RE_BETWEEN_DOTS, index_ngs_library=RE_BETWEEN_DOTS, + params: + **{"args": wf.get_args("popdel", "profile")}, threads: wf.get_resource("popdel", "profile", "threads") resources: time=wf.get_resource("popdel", "profile", "time"), @@ -320,6 +324,7 @@ rule sv_calling_wgs_melt_preprocess: **wf.get_log_file("melt", "preprocess"), params: step_key="sv_calling_wgs", + args=wf.get_args("melt", "preprocess"), wrapper: wf.wrapper_path("melt/preprocess") @@ -342,6 +347,7 @@ rule sv_calling_wgs_melt_indiv_analysis: **wf.get_log_file("melt", "indiv_analysis"), params: step_key="sv_calling_wgs", + args=wf.get_args("melt", "indiv_analysis"), wrapper: wf.wrapper_path("melt/indiv_analysis") @@ -364,6 +370,7 @@ rule sv_calling_wgs_melt_group_analysis: **wf.get_log_file("melt", "group_analysis"), params: step_key="sv_calling_wgs", + args=wf.get_args("melt", "group_analysis"), wrapper: wf.wrapper_path("melt/group_analysis") @@ -386,6 +393,7 @@ rule sv_calling_wgs_melt_genotype: **wf.get_log_file("melt", "genotype"), params: step_key="sv_calling_wgs", + args=wf.get_args("melt", "genotype"), wrapper: wf.wrapper_path("melt/genotype") @@ -407,6 +415,7 @@ rule sv_calling_wgs_melt_make_vcf: **wf.get_log_file("melt", "make_vcf"), params: step_key="sv_calling_wgs", + args=wf.get_args("melt", "make_vcf"), wrapper: wf.wrapper_path("melt/make_vcf") @@ -440,6 +449,8 @@ rule sv_calling_wgs_gcnv_preprocess_intervals: unpack(wf.get_input_files("gcnv", "preprocess_intervals")), output: **wf.get_output_files("gcnv", "preprocess_intervals"), + params: + args=wf.get_params("gcnv", "preprocess_intervals"), threads: wf.get_resource("gcnv", "preprocess_intervals", "threads") resources: time=wf.get_resource("gcnv", "preprocess_intervals", "time"), @@ -540,6 +551,8 @@ rule sv_calling_wgs_gcnv_joint_germline_cnv_segmentation: tmpdir=wf.get_resource("gcnv", "joint_germline_cnv_segmentation", "tmpdir"), log: **wf.get_log_file("gcnv", "joint_germline_cnv_segmentation"), + params: + args=wf.get_params("gcnv", "joint_germline_cnv_segmentation"), wrapper: wf.wrapper_path("gcnv/joint_germline_cnv_segmentation") diff --git a/snappy_pipeline/workflows/sv_calling_wgs/__init__.py b/snappy_pipeline/workflows/sv_calling_wgs/__init__.py index 1f9c68fc4..2830a72e9 100644 --- a/snappy_pipeline/workflows/sv_calling_wgs/__init__.py +++ b/snappy_pipeline/workflows/sv_calling_wgs/__init__.py @@ -2,6 +2,7 @@ import re from itertools import chain +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet, is_not_background @@ -211,6 +212,13 @@ def get_ped_members(self, wildcards): donor.dna_ngs_library.name for donor in pedigree.donors if donor.dna_ngs_library ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + if action == "profile": + return {"reference": self.parent.w_config.static_data_config.reference.path} + else: + return {} + class Sniffles2StepPart(BaseStepPart): """WGS SV identification using Sniffles 2""" diff --git a/snappy_pipeline/workflows/tumor_mutational_burden/__init__.py 
b/snappy_pipeline/workflows/tumor_mutational_burden/__init__.py index 8a7e21903..41d88f651 100644 --- a/snappy_pipeline/workflows/tumor_mutational_burden/__init__.py +++ b/snappy_pipeline/workflows/tumor_mutational_burden/__init__.py @@ -142,8 +142,12 @@ def get_params(self, action): self._validate_action(action) return getattr(self, "_get_params_run") - def _get_params_run(self, wildcards): - return {"missense_re": self.w_config.step_config["tumor_mutational_burden"].missense_regex} + def _get_params_run(self, _wildcards): + return { + "missense_re": self.config.missense_regex, + "target_regions": self.config.target_regions, + "has_annotation": self.config.has_annotation, + } class TumorMutationalBurdenCalculationWorkflow(BaseStep): diff --git a/snappy_pipeline/workflows/varfish_export/__init__.py b/snappy_pipeline/workflows/varfish_export/__init__.py index 96717aede..175bc9a18 100644 --- a/snappy_pipeline/workflows/varfish_export/__init__.py +++ b/snappy_pipeline/workflows/varfish_export/__init__.py @@ -261,14 +261,11 @@ def _get_output_files_annotate_seqvars(self): ) def _get_params_annotate_seqvars(self, wildcards: Wildcards) -> typing.Dict[str, typing.Any]: - pedigree = self.index_ngs_library_to_pedigree[wildcards.index_ngs_library] - for donor in pedigree.donors: - if ( - donor.dna_ngs_library - and donor.dna_ngs_library.extra_infos.get("libraryType") == "WGS" - ): - return {"step_name": "varfish_export"} - return {"step_name": "varfish_export"} + return { + "path_exon_bed": self.config.path_exon_bed, + "reference": self.parent.w_config.static_data_config.reference.path, + "path_mehari_db": self.config.path_mehari_db, + } @dictify def _get_input_files_annotate_strucvars(self, wildcards): diff --git a/snappy_pipeline/workflows/variant_annotation/Snakefile b/snappy_pipeline/workflows/variant_annotation/Snakefile index ea7b6387f..014a76372 100644 --- a/snappy_pipeline/workflows/variant_annotation/Snakefile +++ b/snappy_pipeline/workflows/variant_annotation/Snakefile @@ -42,6 +42,8 @@ rule variant_annotation_vep_run: **wf.get_input_files("vep", "run"), output: **wf.get_output_files("vep", "run"), + params: + args=wf.get_args("vep", "run"), threads: wf.get_resource("vep", "run", "threads") resources: time=wf.get_resource("vep", "run", "time"), diff --git a/snappy_pipeline/workflows/variant_annotation/__init__.py b/snappy_pipeline/workflows/variant_annotation/__init__.py index 76b9372f5..fde9eec99 100644 --- a/snappy_pipeline/workflows/variant_annotation/__init__.py +++ b/snappy_pipeline/workflows/variant_annotation/__init__.py @@ -55,6 +55,7 @@ import re from itertools import chain +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet @@ -122,6 +123,13 @@ def get_output_files(self, action): ], ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "config": self.config.model_dump(by_alias=True), + } + def get_extra_kv_pairs(self): return {"var_caller": self.parent.w_config.step_config["variant_calling"].tools} diff --git a/snappy_pipeline/workflows/variant_calling/Snakefile b/snappy_pipeline/workflows/variant_calling/Snakefile index 0b8b5552b..dfd7cbef5 100644 --- a/snappy_pipeline/workflows/variant_calling/Snakefile +++ b/snappy_pipeline/workflows/variant_calling/Snakefile @@ -59,6 +59,8 @@ rule variant_calling_bcftools_call_run: unpack(wf.get_input_files("bcftools_call", "run")), output: **wf.get_output_files("bcftools_call", "run"), + params: + 
args=wf.get_params("bcftools_call", "run"), threads: wf.get_resource("bcftools_call", "run", "threads") resources: time=wf.get_resource("bcftools_call", "run", "time"), @@ -79,6 +81,8 @@ rule variant_calling_gatk3_hc_run: unpack(wf.get_input_files("gatk3_hc", "run")), output: **wf.get_output_files("gatk3_hc", "run"), + params: + **{"args": wf.get_args("gatk3_hc", "run")}, threads: wf.get_resource("gatk3_hc", "run", "threads") resources: time=wf.get_resource("gatk3_hc", "run", "time"), @@ -99,6 +103,8 @@ rule variant_calling_gatk3_ug_run: unpack(wf.get_input_files("gatk3_ug", "run")), output: **wf.get_output_files("gatk3_ug", "run"), + params: + **{"args": wf.get_args("gatk3_hc", "run")}, threads: wf.get_resource("gatk3_ug", "run", "threads") resources: time=wf.get_resource("gatk3_ug", "run", "time"), @@ -147,8 +153,7 @@ rule variant_calling_gatk4_hc_gvcf_discover: partition=wf.get_resource("gatk4_hc_gvcf", "discover", "partition"), tmpdir=wf.get_resource("gatk4_hc_gvcf", "discover", "tmpdir"), params: - step_key="variant_calling", - caller_key="gatk4_hc_gvcf", + **{"args": wf.get_args("gatk4_hc_gvcf", "combine_gvcfs")}, log: **wf.get_log_file("gatk4_hc_gvcf", "discover"), wrapper: @@ -168,8 +173,7 @@ rule variant_calling_gatk4_hc_gvcf_combine_gvcfs: partition=wf.get_resource("gatk4_hc_gvcf", "combine_gvcfs", "partition"), tmpdir=wf.get_resource("gatk4_hc_gvcf", "combine_gvcfs", "tmpdir"), params: - step_key="variant_calling", - caller_key="gatk4_hc_gvcf", + **{"args": wf.get_args("gatk4_hc_gvcf", "combine_gvcfs")}, log: **wf.get_log_file("gatk4_hc_gvcf", "combine_gvcfs"), wrapper: @@ -189,8 +193,7 @@ rule variant_calling_gatk4_hc_gvcf_genotype: partition=wf.get_resource("gatk4_hc_gvcf", "genotype", "partition"), tmpdir=wf.get_resource("gatk4_hc_gvcf", "genotype", "tmpdir"), params: - step_key="variant_calling", - caller_key="gatk4_hc_gvcf", + **{"args": wf.get_args("gatk4_hc_gvcf", "combine_gvcfs")}, log: **wf.get_log_file("gatk4_hc_gvcf", "genotype"), wrapper: @@ -247,6 +250,8 @@ rule variant_calling_baf_file_generation: **wf.get_input_files("baf_file_generation", "run"), output: **wf.get_output_files("baf_file_generation", "run"), + params: + args=wf.get_params("baf_file_generation", "run"), threads: wf.get_resource("baf_file_generation", "run", "threads") resources: time=wf.get_resource("baf_file_generation", "run", "time"), @@ -267,6 +272,8 @@ rule variant_calling_bcftools_roh: **wf.get_input_files("bcftools_roh", "run"), output: **wf.get_output_files("bcftools_roh", "run"), + params: + args=wf.get_params("bcftools_roh", "run"), threads: wf.get_resource("bcftools_roh", "run", "threads") resources: time=wf.get_resource("bcftools_roh", "run", "time"), diff --git a/snappy_pipeline/workflows/variant_calling/__init__.py b/snappy_pipeline/workflows/variant_calling/__init__.py index 6e3c9e8bf..d6a31ddea 100644 --- a/snappy_pipeline/workflows/variant_calling/__init__.py +++ b/snappy_pipeline/workflows/variant_calling/__init__.py @@ -252,6 +252,7 @@ import warnings from collections import OrderedDict from itertools import chain +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet, Pedigree, is_not_background from snakemake.io import Wildcards, expand @@ -477,6 +478,31 @@ def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: memory=f"{int(3.75 * 1024 * 16)}M", ) + def get_args(self, action: str): + self._validate_action(action) + + def args_fn(_wildcards): + reference_path = self.w_config.static_data_config.reference.path + if "GRCh37" in reference_path or 
"hg19" in reference_path: + assembly = "GRCh37" + elif "GRCh38" in reference_path or "hg38" in reference_path: + assembly = "GRCh38" + else: + assembly = "unknown" + + return { + "reference_path": reference_path, + "reference_index_path": reference_path + ".fai", + "assembly": assembly, + "ignore_chroms": self.config.ignore_chroms, + "gatk4_hc_joint_window_length": self.config.gatk4_hc_joint.window_length, + "gatk4_hc_joint_num_threads": self.config.gatk4_hc_joint.num_threads, + "max_depth": self.config.bcftools_call.max_depth, + "max_indel_depth": self.config.bcftools_call.max_indel_depth, + } + + return args_fn + class GatkCallerStepPartBase(VariantCallingStepPart): """Base class for GATK v3/v4 variant callers""" @@ -507,6 +533,17 @@ class Gatk3HaplotypeCallerStepPart(GatkCallerStepPartBase): #: Step name name = "gatk3_hc" + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "dbsnp": self.parent.w_config.static_data_config.dbsnp.path, + "num_threads": self.config.gatk3_hc.num_threads, + "window_length": self.config.gatk3_hc.window_length, + "allow_seq_dict_incompatibility": self.config.gatk3_hc.allow_seq_dict_incompatibility, + "ignore_chroms": self.config.ignore_chroms, + } + class Gatk3UnifiedGenotyperStepPart(GatkCallerStepPartBase): """Germline variant calling with GATK v3 UnifiedGenotyper""" @@ -514,12 +551,35 @@ class Gatk3UnifiedGenotyperStepPart(GatkCallerStepPartBase): #: Step name name = "gatk3_ug" + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "dbsnp": self.parent.w_config.static_data_config.dbsnp.path, + "num_threads": self.config.gatk3_uc.num_threads, + "window_length": self.config.gatk4_hc_joint.window_length, + "allow_seq_dict_incompatibility": self.config.gatk3_uc.allow_seq_dict_incompatibility, + "downsample_to_coverage": self.config.gatk3_uc.downsample_to_coverage, + "ignore_chroms": self.config.ignore_chroms, + } + class Gatk4HaplotypeCallerJointStepPart(GatkCallerStepPartBase): """Germline variant calling with GATK 4 HaplotypeCaller doing joint calling per pedigree""" name = "gatk4_hc_joint" + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "dbsnp": self.parent.w_config.static_data_config.dbsnp.path, + "window_length": self.config.gatk4_hc_joint.window_length, + "num_threads": self.config.gatk4_hc_joint.num_threads, + "allow_seq_dict_incompatibility": self.config.gatk4_hc_joint.allow_seq_dict_incompatibility, + "ignore_chroms": self.config.ignore_chroms, + } + class Gatk4HaplotypeCallerGvcfStepPart(GatkCallerStepPartBase): """Germline variant calling with GATK 4 HaplotypeCaller and gVCF workflow""" @@ -605,6 +665,19 @@ def _get_output_files_genotype(self) -> SnakemakeDictItemsGenerator: ], ) + def get_args(self, action: str) -> dict[str, Any]: + self._validate_action(action) + return { + "step_key": "variant_calling", + "caller_key": "gatk4_hc_gvcf", + "reference": self.parent.w_config.static_data_config.reference.path, + "dbsnp": self.parent.w_config.static_data_config.dbsnp.path, + "window_length": self.config.gatk4_hc_gvcf.window_length, + "num_threads": self.config.gatk4_hc_gvcf.num_threads, + "allow_seq_dict_incompatibility": self.config.gatk4_hc_gvcf.allow_seq_dict_incompatibility, + "ignore_chroms": 
self.config.ignore_chroms, + } + class ReportGetLogFileMixin: """Log file generation for reports""" @@ -728,6 +801,23 @@ def get_output_files(self, action: str) -> SnakemakeDict: self._validate_action(action) return getattr(self, f"_get_output_files_{action}")() + def get_params(self, action: str): + self._validate_action(action) + + def args_fn(_wildcards): + return { + name: self.config.bcftools_roh.get(name) + for name in [ + "path_targets", + "path_af_file", + "ignore_homref", + "skip_indels", + "rec_rate", + ] + } + + return args_fn + @dictify def _get_output_files_run(self) -> SnakemakeDictItemsGenerator: ext_names = {"txt": ".txt", "txt_md5": ".txt.md5"} @@ -870,6 +960,15 @@ def get_output_files(self, action: str) -> SnakemakeDictItemsGenerator: ], ) + def get_args(self, action: str): + def args_function(_wildcards): + return { + "min_dp": self.config.baf_file_generation.min_dp, + "reference_index_path": self.w_config.static_data_config.reference.path + ".fai", + } + + return args_function + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: self._validate_action(action) return ResourceUsage( diff --git a/snappy_pipeline/workflows/variant_denovo_filtration/Snakefile b/snappy_pipeline/workflows/variant_denovo_filtration/Snakefile index 838c219ed..ea14f24a2 100644 --- a/snappy_pipeline/workflows/variant_denovo_filtration/Snakefile +++ b/snappy_pipeline/workflows/variant_denovo_filtration/Snakefile @@ -73,6 +73,8 @@ rule variant_denovo_filtration_filter_denovo_run: unpack(wf.get_input_files("filter_denovo", "run")), output: **wf.get_output_files("filter_denovo", "run"), + params: + args=wf.get_args("filter_denovo", "run"), threads: wf.get_resource("filter_denovo", "run", "threads") resources: time=wf.get_resource("filter_denovo", "run", "time"), diff --git a/snappy_pipeline/workflows/variant_denovo_filtration/__init__.py b/snappy_pipeline/workflows/variant_denovo_filtration/__init__.py index 6b23ed1d6..1610fdfa7 100644 --- a/snappy_pipeline/workflows/variant_denovo_filtration/__init__.py +++ b/snappy_pipeline/workflows/variant_denovo_filtration/__init__.py @@ -289,6 +289,7 @@ def args_function(wildcards): return { "father": donor.father.dna_ngs_library.name, "mother": donor.mother.dna_ngs_library.name, + "bad_regions_expression": self.config.bad_regions_expression, } return args_function diff --git a/snappy_pipeline/workflows/variant_export_external/__init__.py b/snappy_pipeline/workflows/variant_export_external/__init__.py index a3ef9768d..31c0abf23 100644 --- a/snappy_pipeline/workflows/variant_export_external/__init__.py +++ b/snappy_pipeline/workflows/variant_export_external/__init__.py @@ -433,6 +433,7 @@ def _get_params_gvcf_to_vcf(self, wildcards): result = { "input": list(sorted(self._collect_gvcf(wildcards))), "sample_names": list(sorted(self._collect_sample_ids(wildcards))), + "reference_path": self.w_config.static_data_config.reference.path, } return result @@ -441,18 +442,12 @@ def _get_params_merge_vcf(self, wildcards): "input": list(sorted(self._collect_vcfs(wildcards))), "sample_names": list(sorted(self._collect_sample_ids(wildcards))), "merge_option": self.config.merge_option, + "reference_path": self.w_config.static_data_config.reference.path, } return result def _get_params_annotate(self, wildcards): - pedigree = self.index_ngs_library_to_pedigree[wildcards.index_ngs_library] - for donor in pedigree.donors: - if ( - donor.dna_ngs_library - and donor.dna_ngs_library.extra_infos.get("libraryType") == "WGS" - ): - return {"step_name":
"variant_export_external"} - return {"step_name": "variant_export_external"} + return {"config": self.config.model_dump(by_alias=True)} def _get_params_bam_qc(self, wildcards): """Get parameters for wrapper ``variant_annotator/bam_qc`` diff --git a/snappy_pipeline/workflows/variant_filtration/Snakefile b/snappy_pipeline/workflows/variant_filtration/Snakefile index e4b70f622..9cdaef1ef 100644 --- a/snappy_pipeline/workflows/variant_filtration/Snakefile +++ b/snappy_pipeline/workflows/variant_filtration/Snakefile @@ -71,6 +71,8 @@ rule variant_filtration_filter_quality_run: unpack(wf.get_input_files("filter_quality", "run")), output: **wf.get_output_files("filter_quality", "run"), + params: + args=wf.get_args("filter_quality", "run"), threads: wf.get_resource("filter_quality", "run", "threads") resources: time=wf.get_resource("filter_quality", "run", "time"), @@ -91,6 +93,8 @@ rule variant_filtration_filter_inheritance_run: unpack(wf.get_input_files("filter_inheritance", "run")), output: **wf.get_output_files("filter_inheritance", "run"), + params: + args=wf.get_args("filter_inheritance", "run"), threads: wf.get_resource("filter_inheritance", "run", "threads") resources: time=wf.get_resource("filter_inheritance", "run", "time"), @@ -111,6 +115,8 @@ rule variant_filtration_filter_frequency: unpack(wf.get_input_files("filter_frequency", "run")), output: **wf.get_output_files("filter_frequency", "run"), + params: + args=wf.get_args("filter_frequency", "run"), threads: wf.get_resource("filter_frequency", "run", "threads") resources: time=wf.get_resource("filter_frequency", "run", "time"), @@ -131,6 +137,8 @@ rule variant_filtration_filter_regions: unpack(wf.get_input_files("filter_regions", "run")), output: **wf.get_output_files("filter_regions", "run"), + params: + args=wf.get_args("filter_regions", "run"), threads: wf.get_resource("filter_regions", "run", "threads") resources: time=wf.get_resource("filter_regions", "run", "time"), @@ -151,6 +159,8 @@ rule variant_filtration_filter_scores: unpack(wf.get_input_files("filter_scores", "run")), output: **wf.get_output_files("filter_scores", "run"), + params: + args=wf.get_args("filter_scores", "run"), threads: wf.get_resource("filter_scores", "run", "threads") resources: time=wf.get_resource("filter_scores", "run", "time"), @@ -171,6 +181,8 @@ rule variant_filtration_filter_het_comp: unpack(wf.get_input_files("filter_het_comp", "run")), output: **wf.get_output_files("filter_het_comp", "run"), + params: + args=wf.get_args("filter_het_comp", "run"), threads: wf.get_resource("filter_het_comp", "run", "threads") resources: time=wf.get_resource("filter_het_comp", "run", "time"), diff --git a/snappy_pipeline/workflows/variant_filtration/__init__.py b/snappy_pipeline/workflows/variant_filtration/__init__.py index 03ce041be..1387cac1f 100644 --- a/snappy_pipeline/workflows/variant_filtration/__init__.py +++ b/snappy_pipeline/workflows/variant_filtration/__init__.py @@ -95,9 +95,10 @@ import os import os.path import sys +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet, is_not_background -from snakemake.io import expand +from snakemake.io import expand, Wildcards from snappy_pipeline.utils import dictify, listify from snappy_pipeline.workflows.abstract import ( @@ -137,6 +138,12 @@ class FiltersVariantsStepPartBase(BaseStepPart): #: Class available actions actions = ("run",) + #: Wildcard pattern name (must be set by derived class) + filter_mode = None + + #: Model attribute name (must be set by derived class) + filter_config = 
None + def __init__(self, parent): super().__init__(parent) assert self.name_pattern is not None, "Set into class..." @@ -176,6 +183,26 @@ def get_log_file(self, action): self._validate_action(action) return self.path_log + def get_args(self, action): + # Validate action + self._validate_action(action) + + def args_fn(wildcards: Wildcards) -> dict[str, Any]: + assert self.filter_mode is not None, ( + f"'filter_mode' must be defined for sub-step '{self.name}'" + ) + params = { + "index_library": wildcards.index_library, + "filter_mode": getattr(wildcards, self.filter_mode), + } + if self.filter_config: + params["filter_config"] = getattr(self.config, self.filter_config).model_dump( + by_alias=True + ) + return params + + return args_fn + class FilterQualityStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase): """Apply the configured filters.""" @@ -198,6 +225,12 @@ class FilterQualityStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_quality + filter_mode = "thresholds" + + #: Model name for filter_quality + filter_config = "thresholds" + def get_input_files(self, action): # Validate action self._validate_action(action) @@ -245,6 +278,9 @@ class FilterInheritanceStepPart(InputFilesStepPartMixin, FiltersVariantsStepPart #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_inheritance + filter_mode = "inheritance" + class FilterFrequencyStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase): """Apply the configured filters.""" @@ -270,6 +306,12 @@ class FilterFrequencyStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBa #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_frequency + filter_mode = "frequency" + + #: Model name for filter_frequency + filter_config = "frequencies" + class FilterRegionsStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase): """Apply the configured filters.""" @@ -295,6 +337,12 @@ class FilterRegionsStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_regions + filter_mode = "regions" + + #: Model name for filter_regions + filter_config = "region_beds" + class FilterScoresStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase): """Apply the configured filters.""" @@ -321,6 +369,12 @@ class FilterScoresStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase) #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_scores + filter_mode = "scores" + + #: Model name for filter_scores + filter_config = "score_thresholds" + class FilterHetCompStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase): """Apply the configured filters.""" @@ -347,6 +401,12 @@ class FilterHetCompStepPart(InputFilesStepPartMixin, FiltersVariantsStepPartBase #: Output file extensions ext_values = EXT_VALUES + #: Wildcards name for filter_het_comp + filter_mode = "het_comp" + + #: Model name for filter_het_comp + filter_config = "region_beds" + class VariantFiltrationWorkflow(BaseStep): """Perform germline variant annotation""" diff --git a/snappy_pipeline/workflows/variant_phasing/Snakefile b/snappy_pipeline/workflows/variant_phasing/Snakefile index 77e52602d..2248f65c0 100644 --- a/snappy_pipeline/workflows/variant_phasing/Snakefile +++ b/snappy_pipeline/workflows/variant_phasing/Snakefile @@ -71,6 +71,8 @@ rule variant_phasing_gatk_phase_by_transmission_run:
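A toy illustration of the `filter_mode`/`filter_config` class-attribute pattern from the variant_filtration step above: the wildcard named by `filter_mode` selects the active setting, and `filter_config` names the config sub-model whose dump is handed to the wrapper. Classes, config layout, and sample names below are hypothetical; the real code dumps a pydantic model via `model_dump(by_alias=True)`.

```python
from typing import Any, Optional


class ToyFilterPart:
    filter_mode: Optional[str] = None
    filter_config: Optional[str] = None

    def __init__(self, config: dict[str, Any]):
        self.config = config

    def get_args(self, wildcards: dict[str, str]) -> dict[str, Any]:
        assert self.filter_mode is not None, "filter_mode must be set by the subclass"
        params = {
            "index_library": wildcards["index_library"],
            "filter_mode": wildcards[self.filter_mode],
        }
        if self.filter_config:
            # Stand-in for getattr(self.config, self.filter_config).model_dump(by_alias=True)
            params["filter_config"] = dict(self.config[self.filter_config])
        return params


class ToyFilterFrequency(ToyFilterPart):
    filter_mode = "frequency"
    filter_config = "frequencies"


part = ToyFilterFrequency({"frequencies": {"af_dominant": 0.001}})
print(part.get_args({"index_library": "P001-N1-DNA1-WGS1", "frequency": "dominant_strict"}))
# {'index_library': 'P001-N1-DNA1-WGS1', 'filter_mode': 'dominant_strict',
#  'filter_config': {'af_dominant': 0.001}}
```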
unpack(wf.get_input_files("gatk_phase_by_transmission", "run")), output: **wf.get_output_files("gatk_phase_by_transmission", "run"), + params: + **{"args": wf.get_args("gatk_phase_by_transmission", "run")}, threads: wf.get_resource("gatk_phase_by_transmission", "run", "threads") resources: time=wf.get_resource("gatk_phase_by_transmission", "run", "time"), diff --git a/snappy_pipeline/workflows/variant_phasing/__init__.py b/snappy_pipeline/workflows/variant_phasing/__init__.py index a0d78e7fe..74c895b32 100644 --- a/snappy_pipeline/workflows/variant_phasing/__init__.py +++ b/snappy_pipeline/workflows/variant_phasing/__init__.py @@ -62,6 +62,7 @@ import os from collections import OrderedDict +from typing import Any from biomedsheets.shortcuts import GermlineCaseSheet, is_not_background from snakemake.io import expand @@ -226,6 +227,14 @@ def input_function(wildcards): assert action == "run", "Unsupported actions" return input_function + def get_args(self, action: str) -> dict[str, Any]: + # Validate action + self._validate_action(action) + return { + "reference": self.parent.w_config.static_data_config.reference.path, + "de_novo_prior": self.config.gatk_phase_by_transmission.de_novo_prior, + } + def get_resource_usage(self, action: str, **kwargs) -> ResourceUsage: """Get Resource Usage diff --git a/snappy_pipeline/workflows/wgs_cnv_export_external/__init__.py b/snappy_pipeline/workflows/wgs_cnv_export_external/__init__.py index 8da9450f9..82d6a1618 100644 --- a/snappy_pipeline/workflows/wgs_cnv_export_external/__init__.py +++ b/snappy_pipeline/workflows/wgs_cnv_export_external/__init__.py @@ -260,6 +260,8 @@ def _get_params_annotate(self, wildcards): return { "step_name": "wgs_cnv_export_external", "varfish_server_compatibility": varfish_server_compatibility_flag, + "reference": self.parent.w_config.static_data_config.reference.path, + "config": self.config.model_dump(by_alias=True), } def _collect_vcfs(self, wildcards): diff --git a/snappy_pipeline/workflows/wgs_sv_export_external/__init__.py b/snappy_pipeline/workflows/wgs_sv_export_external/__init__.py index e0221936d..b067fd42b 100644 --- a/snappy_pipeline/workflows/wgs_sv_export_external/__init__.py +++ b/snappy_pipeline/workflows/wgs_sv_export_external/__init__.py @@ -259,6 +259,8 @@ def _get_params_annotate(self, wildcards): return { "step_name": "wgs_sv_export_external", "varfish_server_compatibility": self.config.varfish_server_compatibility, + "reference": self.parent.w_config.static_data_config.reference.path, + "config": self.config.model_dump(by_alias=True), } def _collect_vcfs(self, wildcards): diff --git a/snappy_wrappers/wrappers/alfred/qc/wrapper.py b/snappy_wrappers/wrappers/alfred/qc/wrapper.py index c5cc2e0d8..a1a4a692a 100644 --- a/snappy_wrappers/wrappers/alfred/qc/wrapper.py +++ b/snappy_wrappers/wrappers/alfred/qc/wrapper.py @@ -1,4 +1,9 @@ -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " @@ -15,6 +20,10 @@ } """ +reference_path = snakemake.params["args"]["path_reference"] +genome_path = snakemake.params["args"]["path_reference_genome"] +targets_bed = snakemake.params["args"]["path_targets_bed"] + shell( r""" set -x @@ -53,20 +62,20 @@ # Run actual tools -------------------------------------------------------------------------------- # Get sorted targets BED file. 
-zcat --force {snakemake.params.args[path_targets_bed]} \ +zcat --force {targets_bed} \ | awk -F $'\t' 'BEGIN {{ OFS = FS; }} ($2 < $3) {{ print; }}' \ > $TMPDIR/targets.tmp.bed bedtools sort \ -i $TMPDIR/targets.tmp.bed \ - -faidx {snakemake.config[static_data_config][reference][path]}.genome \ + -faidx {genome_path} \ | uniq \ > $TMPDIR/targets.bed # Run "alfred qc". alfred qc \ --ignore \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {reference_path} \ --bed $TMPDIR/targets.bed \ --jsonout {snakemake.output.json} \ --input-file {snakemake.input.bam} diff --git a/snappy_wrappers/wrappers/alfred/qc_external/environment.yaml b/snappy_wrappers/wrappers/alfred/qc_external/environment.yaml deleted file mode 120000 index a47a13af8..000000000 --- a/snappy_wrappers/wrappers/alfred/qc_external/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../qc/environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/alfred/qc_external/wrapper.py b/snappy_wrappers/wrappers/alfred/qc_external/wrapper.py deleted file mode 100644 index 533b9ed80..000000000 --- a/snappy_wrappers/wrappers/alfred/qc_external/wrapper.py +++ /dev/null @@ -1,104 +0,0 @@ -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -DEF_HELPER_FUNCS = r""" -compute-md5() -{ - if [[ $# -ne 2 ]]; then - >&2 echo "Invalid number of arguments: $#" - exit 1 - fi - md5sum $1 \ - | awk '{ gsub(/.*\//, "", $2); print; }' \ - > $2 -} -""" - -shell( - r""" -set -x - -# Write files for reproducibility ----------------------------------------------------------------- - -{DEF_HELPER_FUNCS} - -# Write out information about conda and save a copy of the wrapper with picked variables -# as well as the environment.yaml file. -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} -compute-md5 {snakemake.log.conda_list} {snakemake.log.conda_list_md5} -compute-md5 {snakemake.log.conda_info} {snakemake.log.conda_info_md5} -cp {__real_file__} {snakemake.log.wrapper} -compute-md5 {snakemake.log.wrapper} {snakemake.log.wrapper_md5} -cp $(dirname {__file__})/environment.yaml {snakemake.log.env_yaml} -compute-md5 {snakemake.log.env_yaml} {snakemake.log.env_yaml_md5} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Create auto-cleaned temporary directory -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Validate input -if [[ "{snakemake.params.args[bam_count]}" -eq 0 ]]; then - echo "No BAM files provided!" - exit 1 -elif [[ "{snakemake.params.args[bam_count]}" -gt 1 ]]; then - echo "Multiple BAM files provided!" - echo "{snakemake.params.args[bam]}" - exit 1 -fi - -# Run actual tools -------------------------------------------------------------------------------- - -# Get sorted targets BED file. -zcat --force {snakemake.params.args[path_targets_bed]} \ -| awk -F $'\t' 'BEGIN {{ OFS = FS; }} ($2 < $3) {{ print; }}' \ -> $TMPDIR/targets.tmp.bed - -bedtools sort \ - -i $TMPDIR/targets.tmp.bed \ - -faidx {snakemake.config[static_data_config][reference][path]}.genome \ -| uniq \ -> $TMPDIR/targets.bed - -# Run "alfred qc". 
-alfred qc \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bed $TMPDIR/targets.bed \ - --jsonout {snakemake.output.json} \ - --input-file {snakemake.params.args[bam]} - -# Compute MD5 sums on output files -compute-md5 {snakemake.output.json} {snakemake.output.json_md5} - -# Create output links ----------------------------------------------------------------------------- - -for path in {snakemake.output.output_links}; do - dst=$path - src=work/${{dst#output/}} - ln -sr $src $dst -done -""" -) - -# Compute MD5 sums of logs. -shell( - r""" -{DEF_HELPER_FUNCS} - -sleep 1s # try to wait for log file flush -compute-md5 {snakemake.log.log} {snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/arriba/run/wrapper.py b/snappy_wrappers/wrappers/arriba/run/wrapper.py index 2a154d473..2251cd489 100644 --- a/snappy_wrappers/wrappers/arriba/run/wrapper.py +++ b/snappy_wrappers/wrappers/arriba/run/wrapper.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """CUBI+Snakemake wrapper code for arriba: Snakemake wrapper.py""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Eric Blanc " @@ -9,14 +14,39 @@ # Input fastqs are passed through snakemake.params. # snakemake.input is a .done file touched after linking files in. -reads_left = snakemake.params.args["input"]["reads_left"] +args = snakemake.params["args"] +reads_left = args["input"]["reads_left"] reads_right = ( - snakemake.params.args["input"]["reads_right"] - if snakemake.params.args["input"]["reads_right"] + args["input"]["reads_right"] + if args["input"]["reads_right"] else "" ) -this_file = __file__ +trim_adapters = args["trim_adapters"] +num_threads_trimming = args["num_threads_trimming"] +trim_cmd = "trimadap-mt -p {num_threads_trimming}" if trim_adapters else "zcat" + +num_threads = args["num_threads"] +arriba_index = args["path_index"] +star_parameters = args["star_parameters"] + +reference_path = args["reference_path"] +features_path = args["features_path"] + +blacklist = args["blacklist"] +blacklist_param = f"-b {blacklist}" if blacklist else "" + +known_fusions = args["known_fusions"] +known_fusions_param = f"-k {known_fusions}" if known_fusions else "" + +tags = args["tags"] +tags_param = f"-t {tags}" if tags else "" + +structural_variants = args["structural_variants"] +structural_variants_param = f"-d {structural_variants}" if structural_variants else "" + +protein_domains = args["protein_domains"] +protein_domains_param = f"-p {protein_domains}" if protein_domains else "" shell( r""" @@ -56,40 +86,23 @@ right_files=$(IFS="," ; echo "${{reads_right[*]}}") fi -trim_cmd="" -if [[ "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][trim_adapters]}" == "True" ]]; then - trim_cmd="\"trimadap-mt -p {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][num_threads_trimming]}\"" -else - trim_cmd="zcat" -fi - STAR \ - --runThreadN {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][num_threads]} \ - --genomeDir {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][path_index]} --genomeLoad NoSharedMemory \ + --runThreadN {num_threads} \ + --genomeDir {arriba_index} --genomeLoad NoSharedMemory \ --readFilesIn ${{left_files}} ${{right_files}} --readFilesCommand ${{trim_cmd}} \ --outStd BAM_Unsorted --outSAMtype BAM Unsorted --outSAMunmapped Within --outBAMcompression 0 \ --outFileNamePrefix $TMPDIR/ \ - 
{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][star_parameters]} \ + {star_parameters} \ | arriba \ -x /dev/stdin \ -o $TMPDIR/fusions.tsv -O $TMPDIR/fusions.discarded.tsv \ - -a {snakemake.config[static_data_config][reference][path]} \ - -g {snakemake.config[static_data_config][features][path]} \ - $(if [[ -n "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][blacklist]}" ]]; then \ - echo -b {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][blacklist]} - fi) \ - $(if [[ -n "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][known_fusions]}" ]]; then \ - echo -k {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][known_fusions]} - fi) \ - $(if [[ -n "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][tags]}" ]]; then \ - echo -t {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][tags]} - fi) \ - $(if [[ -n "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][structural_variants]}" ]]; then \ - echo -d {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][structural_variants]} - fi) \ - $(if [[ -n "{snakemake.config[step_config][somatic_gene_fusion_calling][arriba][protein_domains]}" ]]; then \ - echo -p {snakemake.config[step_config][somatic_gene_fusion_calling][arriba][protein_domains]} - fi) + -a {reference_path} \ + -g {features_path} \ + {blacklist_param} \ + {known_fusions_param} \ + {tags_param} \ + {structural_variants_param} \ + {protein_domains_param} cp $TMPDIR/fusions.tsv {snakemake.output.fusions} pushd $(dirname {snakemake.output.fusions}) diff --git a/snappy_wrappers/wrappers/ascat/build_baf/wrapper.py b/snappy_wrappers/wrappers/ascat/build_baf/wrapper.py index 9413e83e4..1404e39a8 100644 --- a/snappy_wrappers/wrappers/ascat/build_baf/wrapper.py +++ b/snappy_wrappers/wrappers/ascat/build_baf/wrapper.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """Wrapper for building BAF files for ASCAT""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " @@ -14,6 +19,9 @@ ) assert library_name is not None +path_b_af_loci = snakemake.params["args"]["b_af_loci"] +reference_path = snakemake.params["args"]["reference_path"] + shell( r""" set -x @@ -36,12 +44,12 @@ # Perform pileups at the spot positions. 
# samtools mpileup \ - -l {snakemake.config[step_config][somatic_purity_ploidy_estimate][ascat][b_af_loci]} \ + -l {path_b_af_loci} \ -I \ -u \ -v \ -t AD \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {reference_path} \ {snakemake.input.bam} \ | bcftools call \ -c \ @@ -59,7 +67,7 @@ echo -e "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO" \ >> $TMPDIR/spots.vcf -zcat -f {snakemake.config[step_config][somatic_purity_ploidy_estimate][ascat][b_af_loci]} \ +zcat -f {path_b_af_loci} \ | awk \ -F $'\t' \ 'BEGIN {{ OFS=FS; }} diff --git a/snappy_wrappers/wrappers/ascat/build_cnv/wrapper.py b/snappy_wrappers/wrappers/ascat/build_cnv/wrapper.py index 00e436bfd..329a3631a 100644 --- a/snappy_wrappers/wrappers/ascat/build_cnv/wrapper.py +++ b/snappy_wrappers/wrappers/ascat/build_cnv/wrapper.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """Wrapper for building CNV files for ASCAT""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " @@ -14,6 +19,9 @@ ) assert library_name is not None +path_b_af_loci = snakemake.params["args"]["b_af_loci"] +reference_path = snakemake.params["args"]["reference_path"] + shell( r""" set -x @@ -40,7 +48,7 @@ echo -e "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO" \ >> $TMPDIR/spots.vcf -zcat -f {snakemake.config[step_config][somatic_purity_ploidy_estimate][ascat][b_af_loci]} \ +zcat -f {path_b_af_loci} \ | awk \ -F $'\t' ' BEGIN {{ OFS=FS; }} @@ -56,7 +64,7 @@ # # TODO: should become a conda package! /fast/groups/cubi/scratch/mholtgr/cnvetti quick wgs-cov-bins \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {reference_path} \ --input {snakemake.input.bam} \ --output $TMPDIR/cov.bcf diff --git a/snappy_wrappers/wrappers/ascat/build_cnv_from_copywriter/wrapper.py b/snappy_wrappers/wrappers/ascat/build_cnv_from_copywriter/wrapper.py index 328b04378..fdce90a48 100644 --- a/snappy_wrappers/wrappers/ascat/build_cnv_from_copywriter/wrapper.py +++ b/snappy_wrappers/wrappers/ascat/build_cnv_from_copywriter/wrapper.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """Wrapper for building CNV files for ASCAT""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Clemens Messerschmidt " @@ -20,6 +25,8 @@ else: log2_column = 6 +path_b_af_loci = snakemake.params["args"]["b_af_loci"] + shell( r""" set -x @@ -46,7 +53,7 @@ echo -e "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO" \ >> $TMPDIR/spots.vcf -zcat -f {snakemake.config[step_config][somatic_purity_ploidy_estimate][ascat][b_af_loci]} \ +zcat -f {path_b_af_loci} \ | awk \ -F $'\t' ' BEGIN {{ OFS=FS; }} diff --git a/snappy_wrappers/wrappers/baf_file_generation/wrapper.py b/snappy_wrappers/wrappers/baf_file_generation/wrapper.py index 5e8989169..b0e0dbd65 100644 --- a/snappy_wrappers/wrappers/baf_file_generation/wrapper.py +++ b/snappy_wrappers/wrappers/baf_file_generation/wrapper.py @@ -1,7 +1,11 @@ from snakemake.shell import shell +from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " +min_dp = snakemake.params["args"]["min_dp"] +reference_index_path = snakemake.params["args"]["reference_index_path"] + shell( r""" set -x @@ -49,7 +53,7 @@ dp = old3; split(old4, a, ","); rd = a[1]; - if (dp >= {snakemake.config[step_config][variant_calling][baf_file_generation][min_dp]}) {{ + if (dp >= {min_dp}) {{ 
printf("%s\t%f\n", old2, (dp - rd) / dp); }} }} @@ -61,7 +65,7 @@ }}' \ > $TMPDIR/tmp.wig -cut -f 1-2 {snakemake.config[static_data_config][reference][path]}.fai \ +cut -f 1-2 {reference_index_path} \ > $TMPDIR/chrom.sizes wigToBigWig $TMPDIR/tmp.wig $TMPDIR/chrom.sizes {snakemake.output.bw} diff --git a/snappy_wrappers/wrappers/bbduk/run/wrapper.py b/snappy_wrappers/wrappers/bbduk/run/wrapper.py index 7a7764fc7..9fff08876 100644 --- a/snappy_wrappers/wrappers/bbduk/run/wrapper.py +++ b/snappy_wrappers/wrappers/bbduk/run/wrapper.py @@ -1,7 +1,12 @@ # -*- coding: utf-8 -*- """CUBI+Snakemake wrapper code for bbduk: Snakemake wrapper.py""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Eric Blanc " @@ -39,8 +44,7 @@ this_file = __file__ -this_step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][this_step]["bbduk"] +config = snakemake.params["args"]["config"] shell( r""" @@ -177,7 +181,7 @@ ecco={config[ecco]} \ ktrim={config[ktrim]} \ $(if [[ -n "{config[kmask]}" ]] ; then \ - echo kmask={config[kmask]} + echo kmask={config[kmask]} fi) \ maskfullycovered={config[maskfullycovered]} \ ksplit={config[ksplit]} \ @@ -211,7 +215,7 @@ chastityfilter={config[chastityfilter]} \ barcodefilter={config[barcodefilter]} \ $(if [[ -n "{config[barcodes]}" ]]; then \ - echo barcodes={config[barcodes]} + echo barcodes={config[barcodes]} fi) \ xmin={config[xmin]} \ ymin={config[ymin]} \ @@ -232,7 +236,7 @@ cardinality={config[cardinality]} \ cardinalityout={config[cardinalityout]} \ loglogk={config[loglogk]} \ - loglogbuckets={config[loglogbuckets]} + loglogbuckets={config[loglogbuckets]} fns="$out $outm" if [[ $paired -eq 1 ]]; then diff --git a/snappy_wrappers/wrappers/bcftools/TMB/wrapper.py b/snappy_wrappers/wrappers/bcftools/TMB/wrapper.py index 03a72f857..431a3cea2 100644 --- a/snappy_wrappers/wrappers/bcftools/TMB/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/TMB/wrapper.py @@ -1,21 +1,21 @@ # -*- coding: utf-8 -*- """Wrapper for calculating tumor mutation burde with bcftools""" +from typing import TYPE_CHECKING + from snakemake.shell import shell +if TYPE_CHECKING: + from snakemake.script import snakemake + __author__ = "Pham Gia Cuong" __email__ = "pham.gia-cuong@bih-charite.de" -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] +args = snakemake.params["args"] +target_regions = args["target_regions"] +has_annotation = args["has_annotation"] -missense_re = ( - snakemake.params.args["missense_re"] - if "args" in snakemake.params.keys() - and "missense_re" in snakemake.params.args.keys() - and config["has_annotation"] - else "" -) +missense_re = args["missense_re"] if has_annotation else "" shell( r""" @@ -32,7 +32,7 @@ conda list > {snakemake.log.conda_list} conda info > {snakemake.log.conda_info} -bed_file={config[target_regions]} +bed_file={target_regions} bed_file_name=$(basename $bed_file) bed_md5=$(md5sum $bed_file | awk '{{print $1}}') @@ -59,7 +59,7 @@ TMB=$(printf "%f" $(echo "1000000*($number_variants/$total_exom_length)" | bc -l)) missense_TMB=$(printf "%f" $(echo "1000000*($number_missense_variants/$total_exom_length)" | bc -l)) -if [[ $(echo "{config[has_annotation]}" | tr '[a-z]' '[A-Z]') = "TRUE" ]] +if [[ $(echo "{has_annotation}" | tr '[a-z]' '[A-Z]') = "TRUE" ]] then cat << EOF > {snakemake.output.json} {{ diff --git 
a/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/environment.yaml b/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/wrapper.py b/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/wrapper.py deleted file mode 100644 index 7dacf2fec..000000000 --- a/snappy_wrappers/wrappers/bcftools/bcf_to_vcf/wrapper.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running bcftools convert - BCF to compressed VCF.""" - -from snakemake.shell import shell - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log.log}") -set -x -# ----------------------------------------------------------------------------- - -# Write out information about conda installation -conda list > {snakemake.log.conda_list} -conda info > {snakemake.log.conda_info} - -bcftools convert \ - --output-type z \ - --output {snakemake.output.vcf} \ - {snakemake.input.bcf} -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) > $(basename {snakemake.output.vcf_md5}) -md5sum $(basename {snakemake.output.vcf_tbi}) > $(basename {snakemake.output.vcf_tbi_md5}) -""" -) - -# Compute MD5 sums of logs -shell( - r""" -md5sum {snakemake.log.log} > {snakemake.log.log_md5} -md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} -""" -) diff --git a/snappy_wrappers/wrappers/bcftools/call_joint/wrapper.py b/snappy_wrappers/wrappers/bcftools/call_joint/wrapper.py index 65ed3c912..7fc79d54a 100644 --- a/snappy_wrappers/wrappers/bcftools/call_joint/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/call_joint/wrapper.py @@ -6,13 +6,21 @@ """ from snakemake.shell import shell +from snakemake.script import snakemake __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = snakemake.params["args"] args_ignore_chroms = "" -if snakemake.params.args["ignore_chroms"]: - args_ignore_chroms = " ".join(["--ignore-chroms"] + snakemake.params.args["ignore_chroms"]) +if ignore_chroms := args.get("ignore_chroms"): + args_ignore_chroms = " ".join(["--ignore-chroms"] + ignore_chroms) + +reference_path = args["reference_path"] +max_depth = args["max_depth"] +max_indel_depth = args["max_indel_depth"] +window_length = args["window_length"] +num_threads = args["num_threads"] shell( r""" @@ -23,15 +31,15 @@ # ----------------------------------------------------------------------------- -export REF={snakemake.config[static_data_config][reference][path]} +export REF={reference_path} bcftools_joint() {{ samtools mpileup \ --BCF \ --fasta-ref $REF \ - --max-depth {snakemake.config[step_config][somatic_variant_calling][bcftools_joint][max_depth]} \ - --max-idepth {snakemake.config[step_config][somatic_variant_calling][bcftools_joint][max_indel_depth]} \ + --max-depth {max_depth} \ + --max-idepth {max_indel_depth} \ --output-tags DP,AD,ADF,ADR,SP,INFO/AD,INFO/ADF,INFO/ADR \ --per-sample-mF \ --redo-BAQ \ @@ -53,13 +61,13 @@ snappy-genome_windows \ --fai-file $REF.fai \ - --window-size {snakemake.config[step_config][somatic_variant_calling][bcftools_joint][window_length]} \ + 
--window-size {window_length} \ {args_ignore_chroms} \ | parallel \ --plain \ --keep-order \ --verbose \ - --max-procs {snakemake.config[step_config][somatic_variant_calling][bcftools_joint][num_threads]} \ + --max-procs {num_threads} \ bcftools_joint \ | snappy-vcf_first_header \ | bcftools norm \ diff --git a/snappy_wrappers/wrappers/bcftools/filter/wrapper.py b/snappy_wrappers/wrappers/bcftools/filter/wrapper.py index b14b0b5da..0fad336e8 100644 --- a/snappy_wrappers/wrappers/bcftools/filter/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/filter/wrapper.py @@ -1,14 +1,15 @@ # -*- coding: utf-8 -*- -"""Wrapper for running bcftools mpileup""" +"""Wrapper for running bcftools filter""" from snakemake.shell import shell +from snakemake.script import snakemake -params = dict(snakemake.params)["args"] -filter_name = params["filter_name"] +args = snakemake.params["args"] +filter_name = args["filter_name"] expression = ( - '--include "{}"'.format(params["include"]) - if "include" in params - else '--exclude "{}"'.format(params["exclude"]) + '--include "{}"'.format(args["include"]) + if "include" in args + else '--exclude "{}"'.format(args["exclude"]) ) # Actually run the script. diff --git a/snappy_wrappers/wrappers/bcftools/gvcf_to_vcf/wrapper.py b/snappy_wrappers/wrappers/bcftools/gvcf_to_vcf/wrapper.py index 782632fc5..75a7562e1 100644 --- a/snappy_wrappers/wrappers/bcftools/gvcf_to_vcf/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/gvcf_to_vcf/wrapper.py @@ -2,6 +2,10 @@ """Wrapper for running bcftools convert - gVCF to VCF.""" from snakemake.shell import shell +from snakemake.script import snakemake + +args = snakemake.params["args"] +reference_path = args["reference_path"] shell( r""" @@ -34,7 +38,7 @@ # Convert gVCF to VCF, filter at least one allele bcftools convert --gvcf2vcf \ --output-type u \ - --fasta-ref {snakemake.config[static_data_config][reference][path]} \ + --fasta-ref {reference_path} \ {snakemake.params.args[input]} \ | bcftools view --no-update --min-ac 1 \ --output-type z \ diff --git a/snappy_wrappers/wrappers/bcftools/heterozygous_variants/wrapper.py b/snappy_wrappers/wrappers/bcftools/heterozygous_variants/wrapper.py index 66fc2935d..643cb7118 100644 --- a/snappy_wrappers/wrappers/bcftools/heterozygous_variants/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/heterozygous_variants/wrapper.py @@ -1,24 +1,33 @@ # -*- coding: utf-8 -*- """Wrapper for finding heterozygous variants with bcftools""" +from typing import TYPE_CHECKING from snakemake.shell import shell -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] +if TYPE_CHECKING: + from snakemake.script import snakemake -if "args" in snakemake.params and "intervals" in snakemake.params["args"]: - locii = "-r " + snakemake.params["args"]["intervals"] +args = getattr(snakemake.params, "args") + +reference_path = args["reference_path"] + +# FIXME: "locii" only ever gets set as the input, never as a parameter in args +if intervals := args["intervals"]: + locii = "-r " + intervals elif "locii" in snakemake.input.keys(): locii = "-R " + snakemake.input.locii -elif "locii" in config and config["locii"]: - locii = "-R " + config["locii"] +elif locii_arg := args.get("locii"): + locii = "-R " + locii_arg else: locii = "" # Convert minimum B-allele fraction into ratio of alternative to reference alleles -min_ratio = config["min_baf"] / (1 - config["min_baf"]) +min_ratio = args["min_baf"] / (1 - args["min_baf"]) max_ratio = 1 / min_ratio +min_depth = args["min_depth"] 
+max_depth = args["max_depth"] + shell( r""" # ----------------------------------------------------------------------------- @@ -34,13 +43,13 @@ conda info > {snakemake.log.conda_info} only_one_variant="N_ALT=2 & FORMAT/AD[:2]=0" -min_depth="FORMAT/AD[:0]>{config[min_depth]} & FORMAT/AD[:1]>{config[min_depth]}" +min_depth="FORMAT/AD[:0]>{min_depth} & FORMAT/AD[:1]>{min_depth}" hetero="{min_ratio}*FORMAT/AD[:0]<=FORMAT/AD[:1] & FORMAT/AD[:1]<={max_ratio}*FORMAT/AD[:0]" bcftools mpileup \ {locii} \ - --max-depth {config[max_depth]} \ - -f {snakemake.config[static_data_config][reference][path]} \ + --max-depth {max_depth} \ + -f {reference_path} \ -a "FORMAT/AD" \ {snakemake.input.bam} \ | bcftools filter \ diff --git a/snappy_wrappers/wrappers/bcftools/merge_snv_vcf/wrapper.py b/snappy_wrappers/wrappers/bcftools/merge_snv_vcf/wrapper.py index 06f671ab9..fb9d89ab4 100644 --- a/snappy_wrappers/wrappers/bcftools/merge_snv_vcf/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/merge_snv_vcf/wrapper.py @@ -2,9 +2,16 @@ """Wrapper for running bcftools merge - VCF files.""" import tempfile +from typing import TYPE_CHECKING from snakemake.shell import shell +if TYPE_CHECKING: + from snakemake.script import snakemake + +args = snakemake.params["args"] +reference_path = args["reference_path"] + with tempfile.NamedTemporaryFile("wt") as tmpf: # Write paths to input files into temporary file. # @@ -78,7 +85,7 @@ --output-type u \ *.vcf.gz \ | bcftools norm \ - --fasta-ref {snakemake.config[static_data_config][reference][path]} \ + --fasta-ref {reference_path} \ --multiallelics -any \ | bgzip -c > $out popd diff --git a/snappy_wrappers/wrappers/bcftools/merge_vcf/wrapper.py b/snappy_wrappers/wrappers/bcftools/merge_vcf/wrapper.py index b0a04490e..86e4fb19b 100644 --- a/snappy_wrappers/wrappers/bcftools/merge_vcf/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/merge_vcf/wrapper.py @@ -2,14 +2,24 @@ """Wrapper for running bcftools merge - Structural VCF files (CNV, SV).""" import tempfile +from typing import TYPE_CHECKING from snakemake.shell import shell +if TYPE_CHECKING: + from snakemake.script import snakemake + +args = snakemake.params["args"] +merge_option = args["merge_option"] +gvcf_option = args["gvcf_option"] +sample_names = args["sample_names"] +input_ = args["input"] + with tempfile.NamedTemporaryFile("wt") as tmpf: # Write paths to input files into temporary file. # # cf. https://bitbucket.org/snakemake/snakemake/issues/878 - print("\n".join(snakemake.params.args["input"]), file=tmpf) + print("\n".join(input_), file=tmpf) tmpf.flush() # Actually run the script. 
shell( @@ -57,13 +67,13 @@ # Define merge option merge_option="--merge none" - if [[ "{snakemake.params.args[merge_option]}" != "None" ]]; then - merge_option="--merge {snakemake.params.args[merge_option]}" + if [[ "{merge_option}" != "None" ]]; then + merge_option="--merge {merge_option}" fi # Set merge gVCF option gvcf_option="" - if [[ "{snakemake.params.args[gvcf_option]}" != "False" ]]; then + if [[ "{gvcf_option}" != "False" ]]; then gvcf_option="--gvcf" fi @@ -73,7 +83,7 @@ # Validate VCF: contains all expected samples while read sample; do check_vcf $TMPDIR/cwd/1.vcf.gz $sample - done < <(echo {snakemake.params.args[sample_names]}) + done < <(echo {sample_names}) # Copy cp $TMPDIR/cwd/1.vcf.gz {snakemake.output.vcf} cp $TMPDIR/cwd/1.vcf.gz.tbi {snakemake.output.vcf_tbi} diff --git a/snappy_wrappers/wrappers/bcftools/pileups/wrapper.py b/snappy_wrappers/wrappers/bcftools/pileups/wrapper.py index 82bba742a..7c8f7c9e8 100644 --- a/snappy_wrappers/wrappers/bcftools/pileups/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/pileups/wrapper.py @@ -1,17 +1,25 @@ # -*- coding: utf-8 -*- """Wrapper for running bcftools mpileup""" +from typing import TYPE_CHECKING + from snakemake.shell import shell -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] +if TYPE_CHECKING: + from snakemake.script import snakemake + + +args = getattr(snakemake.params, "args") +reference_path = args["reference_path"] +max_depth = args["max_depth"] -if "args" in snakemake.params and "intervals" in snakemake.params["args"]: - locii = "-r " + snakemake.params["args"]["intervals"] +# FIXME: "locii" only ever gets set as the input, never as a parameter in args +if intervals := args["intervals"]: + locii = "-r " + intervals elif "locii" in snakemake.input.keys(): locii = "-R " + snakemake.input.locii -elif "locii" in config and config["locii"]: - locii = "-R " + config["locii"] +elif locii_arg := args.get("locii"): + locii = "-R " + locii_arg else: locii = "" @@ -32,8 +40,8 @@ bcftools mpileup \ {locii} \ - --max-depth {config[max_depth]} \ - -f {snakemake.config[static_data_config][reference][path]} \ + --max-depth {max_depth} \ + -f {reference_path} \ -a "FORMAT/AD" \ -O z -o {snakemake.output.vcf} \ {snakemake.input.bam} diff --git a/snappy_wrappers/wrappers/bcftools/protected/wrapper.py b/snappy_wrappers/wrappers/bcftools/protected/wrapper.py index d471b605a..2c185cbd6 100644 --- a/snappy_wrappers/wrappers/bcftools/protected/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/protected/wrapper.py @@ -1,9 +1,14 @@ # -*- coding: utf-8 -*- """Wrapper for running bcftools mpileup""" +from typing import TYPE_CHECKING + from snakemake.shell import shell -params = dict(snakemake.params)["args"] +if TYPE_CHECKING: + from snakemake.script import snakemake + +params = snakemake.params["args"] filter_name = params["filter_name"] bed = params["path_bed"] diff --git a/snappy_wrappers/wrappers/bcftools/regions/wrapper.py b/snappy_wrappers/wrappers/bcftools/regions/wrapper.py index af44bae5c..c105ad312 100644 --- a/snappy_wrappers/wrappers/bcftools/regions/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools/regions/wrapper.py @@ -1,9 +1,14 @@ # -*- coding: utf-8 -*- """Wrapper for running bcftools filter over regions defined by a bed file""" +from typing import TYPE_CHECKING + from snakemake.shell import shell -params = dict(snakemake.params)["args"] +if TYPE_CHECKING: + from snakemake.script import snakemake + +params = snakemake.params["args"] filter_name = params["filter_name"] bed = 
f'^{params["include"]}' if "include" in params else params["exclude"] diff --git a/snappy_wrappers/wrappers/bcftools/stats/wrapper.py b/snappy_wrappers/wrappers/bcftools/stats/wrapper.py deleted file mode 100644 index 197e9bc5b..000000000 --- a/snappy_wrappers/wrappers/bcftools/stats/wrapper.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Canvas in somatic variant calling mode on WGS data""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -module purge -module load HTSlib/1.3.1-foss-2015a -module load BCFtools/1.3.1-foss-2015a - -bcftools stats {snakemake.input} \ -> {snakemake.output.txt} - -pushd $(dirname {snakemake.output.txt}) -md5sum $(basename {snakemake.output.txt}) >$(basename {snakemake.output.txt}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/bcftools_call/wrapper.py b/snappy_wrappers/wrappers/bcftools_call/wrapper.py index fe1fc662c..db28f9c74 100644 --- a/snappy_wrappers/wrappers/bcftools_call/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools_call/wrapper.py @@ -1,4 +1,9 @@ -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " @@ -14,15 +19,18 @@ > $2 } """ +args = snakemake.params["args"] + +reference_path = args["reference_path"] +assembly = args["assembly"] + +ignore_chroms = args["ignore_chroms"] -ref_path = snakemake.config["static_data_config"]["reference"]["path"] -if "GRCh37" in ref_path or "hg19" in ref_path: - assembly = "GRCh37" -elif "GRCh38" in ref_path or "hg38" in ref_path: - assembly = "GRCh38" -else: - assembly = "unknown" +max_depth = args["max_depth"] +max_indel_depth = args["max_indel_depth"], +gatk4_hc_joint_window_length = args["gatk4_hc_joint_window_length"] +gatk4_hc_joint_num_threads = args["gatk4_hc_joint_num_threads"] shell( r""" @@ -63,13 +71,13 @@ # Create binning of the reference into windows of roughly the same size. 
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_joint][window_length]} \ + --reference {reference_path} \ + --bin-length {gatk4_hc_joint_window_length} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {ignore_chroms}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {reference_path}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -104,10 +112,10 @@ bcftools mpileup \ -Ou \ --annotate FORMAT/AD,FORMAT/DP \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {reference_path} \ --per-sample-mF \ - --max-depth {snakemake.config[step_config][variant_calling][bcftools_call][max_depth]} \ - --max-idepth {snakemake.config[step_config][variant_calling][bcftools_call][max_indel_depth]} \ + --max-depth {max_depth} \ + --max-idepth {max_indel_depth} \ --redo-BAQ \ --regions $2 \ {snakemake.input.bam} \ @@ -126,7 +134,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk4_hc_joint][num_threads]} +num_threads={gatk4_hc_joint_num_threads} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -142,7 +150,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {reference_path} \ -O z \ -o {snakemake.output.vcf} tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/bcftools_roh/wrapper.py b/snappy_wrappers/wrappers/bcftools_roh/wrapper.py index 76e1bf2fa..860db1778 100644 --- a/snappy_wrappers/wrappers/bcftools_roh/wrapper.py +++ b/snappy_wrappers/wrappers/bcftools_roh/wrapper.py @@ -1,7 +1,20 @@ -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " +args = snakemake.params["args"] + +path_targets = args["path_targets"] +path_af_file = args["path_af_file"] +ignore_homref = args["ignore_homref"] +skip_indels = args["skip_indels"] +rec_rate = args["rec_rate"] + DEF_HELPER_FUNCS = r""" compute-md5() { @@ -56,20 +69,20 @@ raw_out=${{out%.regions.txt.gz}}.raw.txt.gz bcftools roh \ - $(if [[ "{snakemake.config[step_config][variant_calling][bcftools_roh][path_targets]}" != "None" ]]; then - echo --regions-file "{snakemake.config[step_config][variant_calling][bcftools_roh][path_targets]}" + $(if [[ "{path_targets}" != "None" ]]; then + echo --regions-file "{path_targets}" fi) \ - $(if [[ "{snakemake.config[step_config][variant_calling][bcftools_roh][path_af_file]}" != "None" ]]; then - echo --AF-file "{snakemake.config[step_config][variant_calling][bcftools_roh][path_af_file]}" + $(if [[ "{path_af_file}" != "None" ]]; then + echo --AF-file "{path_af_file}" fi) \ - $(if [[ "{snakemake.config[step_config][variant_calling][bcftools_roh][ignore_homref]}" != "False" ]]; then + $(if [[ "{ignore_homref}" != "False" ]]; then echo --ignore-homref fi) \ - $(if [[ "{snakemake.config[step_config][variant_calling][bcftools_roh][skip_indels]}" != "False" ]]; then + $(if [[ 
"{skip_indels}" != "False" ]]; then echo --skip-indels fi) \ - $(if [[ "{snakemake.config[step_config][variant_calling][bcftools_roh][rec_rate]}" != "None" ]]; then - echo --rec-rate "{snakemake.config[step_config][variant_calling][bcftools_roh][rec_rate]}" + $(if [[ "{rec_rate}" != "None" ]]; then + echo --rec-rate "{rec_rate}" fi) \ --output $raw_out \ --output-type srz \ diff --git a/snappy_wrappers/wrappers/bed_jaccard_operations/__init__.py b/snappy_wrappers/wrappers/bed_jaccard_operations/__init__.py deleted file mode 100644 index f33460121..000000000 --- a/snappy_wrappers/wrappers/bed_jaccard_operations/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for BEDTools intersect + snappy-bed_filter_jaccard""" diff --git a/snappy_wrappers/wrappers/bed_jaccard_operations/environment.yaml b/snappy_wrappers/wrappers/bed_jaccard_operations/environment.yaml deleted file mode 100644 index 7c4d41541..000000000 --- a/snappy_wrappers/wrappers/bed_jaccard_operations/environment.yaml +++ /dev/null @@ -1,6 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bedtools==2.26.0 diff --git a/snappy_wrappers/wrappers/bed_jaccard_operations/meta.yaml b/snappy_wrappers/wrappers/bed_jaccard_operations/meta.yaml deleted file mode 100644 index f4c28bf46..000000000 --- a/snappy_wrappers/wrappers/bed_jaccard_operations/meta.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: bed_jaccard_operations -description: Perform BED intersection or subtraction based on Jaccard index -authors: - - Manuel Holtgrewe diff --git a/snappy_wrappers/wrappers/bed_jaccard_operations/wrapper.py b/snappy_wrappers/wrappers/bed_jaccard_operations/wrapper.py deleted file mode 100644 index 84cce32ad..000000000 --- a/snappy_wrappers/wrappers/bed_jaccard_operations/wrapper.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for BEDTools intersect + snappy-bed_filter_jaccard: Snakemake wrapper.py""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -# Handle /dev/stdout output file -if str(snakemake.output) == ".wrapper.done": - output_arg = "" - output_bed = "" -else: - output_arg = ">" + snakemake.output.bed - output_bed = snakemake.output.bed - -shell( - r""" -set -x - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -# Hack: get back bin directory of base/root environment. 
-export PATH=$PATH:$(dirname $(dirname $(which conda)))/bin - -bedtools intersect \ - -wao \ - -a <(grep -v '^#' {snakemake.input.first} || true) \ - -b <({{ grep -v '^#' {snakemake.input.second} || true; }} | cut -f 1-3) \ -| snappy-bed_filter_jaccard \ - --operation {snakemake.params.args[operation]} \ - --num-cols-first {snakemake.params.args[num_cols_first]} \ - --num-cols-second 3 \ - --threshold {snakemake.params.args[threshold]} \ -| cut -f 1-{snakemake.params.args[num_cols_first]} \ -{output_arg} - -if [[ -n "{output_arg}" ]]; then - pushd $(dirname {output_bed}) && - md5sum $(basename {output_bed}) >$(basename {output_bed}).md5 -fi -""" -) diff --git a/snappy_wrappers/wrappers/bed_venn/__init__.py b/snappy_wrappers/wrappers/bed_venn/__init__.py deleted file mode 100644 index 8fe3c9378..000000000 --- a/snappy_wrappers/wrappers/bed_venn/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -"""Create Venn diagrams from BED file overlaps based on Jaccard-index""" diff --git a/snappy_wrappers/wrappers/bed_venn/environment.yaml b/snappy_wrappers/wrappers/bed_venn/environment.yaml deleted file mode 100644 index 21bf63685..000000000 --- a/snappy_wrappers/wrappers/bed_venn/environment.yaml +++ /dev/null @@ -1,6 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bedtools ==2.26.0 diff --git a/snappy_wrappers/wrappers/bed_venn/meta.yaml b/snappy_wrappers/wrappers/bed_venn/meta.yaml deleted file mode 100644 index 9069ffdae..000000000 --- a/snappy_wrappers/wrappers/bed_venn/meta.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: bedvenn -description: Create venn diagram from BED file overlaps using Jaccard index -authors: - - Manuel Holtgrewe diff --git a/snappy_wrappers/wrappers/bed_venn/wrapper.py b/snappy_wrappers/wrappers/bed_venn/wrapper.py deleted file mode 100644 index 2d6014e78..000000000 --- a/snappy_wrappers/wrappers/bed_venn/wrapper.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI BEDVenn: Snakemake wrapper.py""" - -import itertools -import textwrap - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - - -def powerset(iterable): - """powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)""" - s = list(iterable) - return itertools.chain.from_iterable(itertools.combinations(s, r) for r in range(len(s) + 1)) - - -all_ids = set(snakemake.input.keys()) -id_sets = tuple(map(set, powerset(all_ids))) - -PREFIX = r""" -set -x - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -export TMPDIR=$(mktemp -d) -""" - -unique_cmd = "| sort -u" if snakemake.params["args"]["unique"] else "" - -chunks = [] - -for id_set in id_sets: - if not id_set: - continue # skip empty set - labels = "\\t".join(sorted(id_set)) - intersect_files = [snakemake.input[name] for name in id_set] - subtract_files = [ - snakemake.input[name] for name in snakemake.input.keys() if not name in id_set - ] - chunks.append( - textwrap.dedent( - r""" - # Hack: get back bin directory of base/root environment. 
- export PATH=$PATH:$(dirname $(dirname $(which conda)))/bin - - cut -f 1-3 {first} >$TMPDIR/input_first.txt - if [[ -z "{intersect_files}" ]]; then - cp $TMPDIR/input_first.txt $TMPDIR/intersected.txt - else - i=0 - cp $TMPDIR/input_first.txt $TMPDIR/intersected.0.txt - for path in {intersect_files}; do - let "i=$i+1" - cut -f 1-3 $path > $TMPDIR/input_second.txt - snappy-bed_jaccard_operations \ - --input-first $TMPDIR/intersected.$(($i - 1)).txt \ - --input-second $TMPDIR/input_second.txt \ - --threshold {{snakemake.params[args][jaccard_threshold]}} \ - --output-file /dev/stdout \ - --operation intersect \ - {unique_cmd} \ - > $TMPDIR/intersected.$i.txt - done - cp $TMPDIR/intersected.$i.txt $TMPDIR/intersected.txt - fi - - if [[ -z "{subtract_files}" ]]; then - cp $TMPDIR/intersected.txt $TMPDIR/subtracted.txt - else - cut -f 1-3 {subtract_files} >$TMPDIR/input_second.txt - snappy-bed_jaccard_operations \ - --input-first $TMPDIR/intersected.txt \ - --input-second $TMPDIR/input_second.txt \ - --threshold {{snakemake.params[args][jaccard_threshold]}} \ - --output-file $TMPDIR/subtracted.txt \ - --operation subtract - fi - - echo -e "{labels}\t$(wc -l $TMPDIR/subtracted.txt | awk '{{{{ print $1 }}}}')" \ - >>$TMPDIR/overlap.txt - """ - ).format( - first=intersect_files[0], - intersect_files=" ".join(intersect_files[1:]), - subtract_files=" ".join(subtract_files), - unique_cmd=unique_cmd, - labels=labels, - ) - ) - -SUFFIX = r""" -export MPLBACKEND="agg" -snappy-quickvenn \ - --input-shared-counts $TMPDIR/overlap.txt \ - --output-image {snakemake.output.image} -pushd $(dirname {snakemake.output.image}) && \ - md5sum $(basename {snakemake.output.image}) >$(basename {snakemake.output.image}).md5 && \ - popd - -cp $TMPDIR/overlap.txt {snakemake.output.counts} -pushd $(dirname {snakemake.output.counts}) && \ - md5sum $(basename {snakemake.output.counts}) >$(basename {snakemake.output.counts}).md5 && \ - popd -""" - -shell.executable("/bin/bash") - -shell(PREFIX + "\n".join(chunks) + SUFFIX) diff --git a/snappy_wrappers/wrappers/bwa/wrapper.py b/snappy_wrappers/wrappers/bwa/wrapper.py index 4a9c520b9..301bb05d3 100644 --- a/snappy_wrappers/wrappers/bwa/wrapper.py +++ b/snappy_wrappers/wrappers/bwa/wrapper.py @@ -1,16 +1,33 @@ # -*- coding: utf-8 -*- """CUBI+Snakemake wrapper code for BWA: Snakemake wrapper.py""" -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " shell.executable("/bin/bash") +args = snakemake.params["args"] + # Input fastqs are passed through snakemake.params. # snakemake.input is a .done file touched after linking files in. 
-input_left = snakemake.params.args["input"]["reads_left"] -input_right = snakemake.params.args["input"].get("reads_right", "") +input_left = args["input"]["reads_left"] +input_right = args["input"].get("reads_right", "") + +path_bwa_index = args["path_index"] +trim_adapters = args["trim_adapters"] +num_threads_trimming = args["num_threads_trimming"] +mask_duplicates = args["mask_duplicates"] +num_threads_bam_view = args["num_threads_bam_view"] +memory_bam_sort = args["memory_bam_sort"] +num_threads_bam_sort = args["num_threads_bam_sort"] +num_threads_align = args["num_threads_align"] +split_as_secondary = args["split_as_secondary"] shell( r""" @@ -44,7 +61,7 @@ mkdir -p $TMPDIR/tmp.d # Define some global shortcuts -INDEX={snakemake.config[step_config][ngs_mapping][bwa][path_index]} +INDEX={path_bwa_index} # Define left and right reads as Bash arrays declare -a reads_left=({input_left}) @@ -72,8 +89,8 @@ {{ set -x - if [[ "{snakemake.config[step_config][ngs_mapping][bwa][trim_adapters]}" == "True" ]]; then - trimadap-mt -p {snakemake.config[step_config][ngs_mapping][bwa][num_threads_trimming]} + if [[ "{trim_adapters}" == "True" ]]; then + trimadap-mt -p {num_threads_trimming} else cat # TODO: can we somehow remove this? fi @@ -84,7 +101,7 @@ {{ set -x - if [[ "{snakemake.config[step_config][ngs_mapping][bwa][mask_duplicates]}" == "True" ]]; then + if [[ "{mask_duplicates}" == "True" ]]; then samblaster --addMateTags else cat # TODO: can we somehow remove this? @@ -101,11 +118,11 @@ | samtools view \ -u \ -Sb \ - -@ {snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_view]} \ + -@ {num_threads_bam_view} \ | samtools sort \ -T $TMPDIR/sort_bam \ - -m {snakemake.config[step_config][ngs_mapping][bwa][memory_bam_sort]} \ - -@ {snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_sort]} \ + -m {memory_bam_sort} \ + -@ {num_threads_bam_sort} \ -O BAM \ -o $out }} @@ -118,12 +135,12 @@ for ((i = 0; i < ${{#reads_left[@]}}; i++)); do # Compute suffix array indices for BWA-ALN - bwa aln -t {snakemake.config[step_config][ngs_mapping][bwa][num_threads_align]} $INDEX ${{reads_left[$i]}} >$TMPDIR/left.sai + bwa aln -t {num_threads_align} $INDEX ${{reads_left[$i]}} >$TMPDIR/left.sai if [[ $paired -eq 1 ]]; then sai_right=$TMPDIR/right.sai fastq_right=${{reads_right[$i]}} - bwa aln -t {snakemake.config[step_config][ngs_mapping][bwa][num_threads_align]} $INDEX $fastq_right >$sai_right + bwa aln -t {num_threads_align} $INDEX $fastq_right >$sai_right bwa_cmd=sampe else sai_right= @@ -155,7 +172,7 @@ | add_rg \ | samtools view \ -b \ - -@ {snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_view]} \ + -@ {num_threads_bam_view} \ -o $TMPDIR/tmp.d/out.$i.bam done }} @@ -167,7 +184,7 @@ # Decide whether to write split reads as supplementary or secondary (-M means secondary) split_as_supp_flag= - if [[ "{snakemake.config[step_config][ngs_mapping][bwa][split_as_secondary]}" == "True" ]]; then + if [[ "{split_as_secondary}" == "True" ]]; then split_as_supp_flag="-M" fi @@ -195,11 +212,11 @@ $split_as_supp_flag \ $rg_arg \ -p \ - -t {snakemake.config[step_config][ngs_mapping][bwa][num_threads_align]} \ + -t {num_threads_align} \ /dev/stdin \ | samtools view \ -b \ - -@ {snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_view]} \ + -@ {num_threads_bam_view} \ -o $TMPDIR/tmp.d/out.$i.bam done }} @@ -222,14 +239,14 @@ # Move over a single output file but merge multiple ones if [[ ${{#reads_left[@]}} -eq 1 ]]; then samtools view \ - -@ 
{snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_view]} \ + -@ {num_threads_bam_view} \ -h $TMPDIR/tmp.d/out.0.bam \ | postproc_bam {snakemake.output.bam} else # Create merged header for f in $TMPDIR/tmp.d/out.*.bam; do samtools view \ - -@ {snakemake.config[step_config][ngs_mapping][bwa][num_threads_bam_view]} \ + -@ {num_threads_bam_view} \ -H $f >${{f%.bam}}.hdr.sam done samtools merge $TMPDIR/merged.hdr.bam $TMPDIR/tmp.d/out.*.hdr.sam diff --git a/snappy_wrappers/wrappers/bwa_mem2/wrapper.py b/snappy_wrappers/wrappers/bwa_mem2/wrapper.py index 7196322ff..a23e0aee2 100644 --- a/snappy_wrappers/wrappers/bwa_mem2/wrapper.py +++ b/snappy_wrappers/wrappers/bwa_mem2/wrapper.py @@ -1,4 +1,9 @@ -from snakemake import shell +from typing import TYPE_CHECKING + +from snakemake.shell import shell + +if TYPE_CHECKING: + from snakemake.script import snakemake __author__ = "Manuel Holtgrewe " @@ -15,8 +20,20 @@ } """ -input_left = snakemake.params.args["input"]["reads_left"] -input_right = snakemake.params.args["input"].get("reads_right", "") +args = snakemake.params["args"] + +input_left = args["input"]["reads_left"] +input_right = args["input"].get("reads_right", "") + +path_index = args["path_index"] +trim_adapters = args["trim_adapters"] +num_threads_trimming = args["num_threads_trimming"] +mask_duplicates = args["mask_duplicates"] +num_threads_bam_view = args["num_threads_bam_view"] +memory_bam_sort = args["memory_bam_sort"] +num_threads_bam_sort = args["num_threads_bam_sort"] +num_threads_align = args["num_threads_align"] +split_as_secondary = args["split_as_secondary"] shell( r""" @@ -77,8 +94,8 @@ {{ set -x - if [[ "{snakemake.config[step_config][ngs_mapping][bwa_mem2][trim_adapters]}" == "True" ]]; then - trimadap-mt -p {snakemake.config[step_config][ngs_mapping][bwa_mem2][num_threads_trimming]} + if [[ "{trim_adapters}" == "True" ]]; then + trimadap-mt -p {num_threads_trimming} else cat fi @@ -89,7 +106,7 @@ {{ set -x - if [[ "{snakemake.config[step_config][ngs_mapping][bwa_mem2][mask_duplicates]}" == "True" ]]; then + if [[ "{mask_duplicates}" == "True" ]]; then samblaster --addMateTags else cat @@ -103,7 +120,7 @@ # Decide whether to write split reads as supplementary or secondary (-M means secondary) split_as_supp_flag= - if [[ "{snakemake.config[step_config][ngs_mapping][bwa_mem2][split_as_secondary]}" == "True" ]]; then + if [[ "{split_as_secondary}" == "True" ]]; then split_as_supp_flag="-M" fi @@ -114,11 +131,11 @@ fi bwa-mem2 mem \ - {snakemake.config[step_config][ngs_mapping][bwa_mem2][path_index]} \ + {path_index} \ $split_as_supp_flag \ $rg_arg \ -p \ - -t {snakemake.config[step_config][ngs_mapping][bwa_mem2][num_threads_align]} \ + -t {num_threads_align} \ /dev/stdin }} @@ -132,8 +149,8 @@ samtools sort \ -T $TMPDIR/sort_bam \ - -m {snakemake.config[step_config][ngs_mapping][bwa_mem2][memory_bam_sort]} \ - -@ {snakemake.config[step_config][ngs_mapping][bwa_mem2][num_threads_bam_sort]} \ + -m {memory_bam_sort} \ + -@ {num_threads_bam_sort} \ -O BAM \ -o /dev/stdout \ /dev/stdin \ diff --git a/snappy_wrappers/wrappers/canvas/germline_wgs/wrapper.py b/snappy_wrappers/wrappers/canvas/germline_wgs/wrapper.py deleted file mode 100644 index 237aa3234..000000000 --- a/snappy_wrappers/wrappers/canvas/germline_wgs/wrapper.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Canvas in germline variant calling mode on WGS data""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = 
"manuel.holtgrewe@bih-charite.de" - -shell( - r""" -set -x - -# Write out information about conda installation -------------------------------------------------- - -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Setup auto-cleaned TMPDIR ----------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -module purge -module load Canvas/1.11.0 # also loads mono -module load HTSlib/1.3.1-foss-2015a - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -mono $EBROOTCANVAS/Canvas.exe Germline-WGS \ - --bam={snakemake.input.bam} \ - --b-allele-vcf={snakemake.input.vcf} \ - --output=$TMPDIR \ - --reference={snakemake.config[step_config][wgs_cnv_calling][canvas][path_reference]} \ - --genome-folder={snakemake.config[step_config][wgs_cnv_calling][canvas][path_genome_folder]} \ - --filter-bed={snakemake.config[step_config][wgs_cnv_calling][canvas][path_filter_bed]} \ - --sample-name={snakemake.wildcards.library_name} - -cp $TMPDIR/CNV.vcf.gz {snakemake.output.vcf} - -fname={snakemake.output.vcf} -cp $TMPDIR/CNV.CoverageAndVariantFrequency.txt \ - ${{fname%.vcf.gz}}.cov_and_var_freq.txt - -tabix -f {snakemake.output.vcf} -pushd $(dirname {snakemake.output.vcf}) - -for f in *.vcf.gz *.vcf.gz.tbi *.txt; do - md5sum $f >$f.md5 -done -""" -) diff --git a/snappy_wrappers/wrappers/canvas/somatic_wgs/wrapper.py b/snappy_wrappers/wrappers/canvas/somatic_wgs/wrapper.py index a7eb4aab9..87001fb8a 100644 --- a/snappy_wrappers/wrappers/canvas/somatic_wgs/wrapper.py +++ b/snappy_wrappers/wrappers/canvas/somatic_wgs/wrapper.py @@ -1,11 +1,21 @@ # -*- coding: utf-8 -*- """Wrapper for running Canvas in somatic variant calling mode on WGS data""" +from typing import TYPE_CHECKING + from snakemake.shell import shell +if TYPE_CHECKING: + from snakemake.script import snakemake + __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = snakemake.params["args"] +path_reference = args["path_reference"] +path_genome_folder = args["path_genome_folder"] +path_filter_bed = args["path_filter_bed"] + shell( r""" set -x @@ -40,9 +50,9 @@ --bam={snakemake.input.tumor_bam} \ --b-allele-vcf={snakemake.input.somatic_vcf} \ --output=$(dirname {snakemake.output.vcf}) \ - --reference={snakemake.config[step_config][somatic_wgs_cnv_calling][canvas][path_reference]} \ - --genome-folder={snakemake.config[step_config][somatic_wgs_cnv_calling][canvas][path_genome_folder]} \ - --filter-bed={snakemake.config[step_config][somatic_wgs_cnv_calling][canvas][path_filter_bed]} \ + --reference={path_reference} \ + --genome-folder={path_genome_folder} \ + --filter-bed={path_filter_bed} \ --sample-name={snakemake.wildcards.cancer_library} tabix -f {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/cbioportal/case_lists/wrapper.py b/snappy_wrappers/wrappers/cbioportal/case_lists/wrapper.py index 0b29e3302..459c52779 100644 --- a/snappy_wrappers/wrappers/cbioportal/case_lists/wrapper.py +++ b/snappy_wrappers/wrappers/cbioportal/case_lists/wrapper.py @@ -2,8 
+2,11 @@ """CUBI+Snakemake wrapper code for preparing cbioportal patient metadata table from biomedsheets input. Takes a dict from biomedsheets/snappy_pipeline, writes out all_cases_with_mutation_data.txt """ - import os +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from snakemake.script import snakemake def write_case_list(args, outfile): @@ -21,9 +24,7 @@ def write_case_list(args, outfile): "", ] ).format( - cancer_study_id=snakemake.config["step_config"]["cbioportal_export"]["study"][ - "cancer_study_id" - ], + cancer_study_id=snakemake.params["args"]["__cancer_study_id"], stable_id=args["stable_id"], name=args["name"], description=args["description"], diff --git a/snappy_wrappers/wrappers/cbioportal/clinical_data/wrapper.py b/snappy_wrappers/wrappers/cbioportal/clinical_data/wrapper.py index 338b8699b..bcc960191 100644 --- a/snappy_wrappers/wrappers/cbioportal/clinical_data/wrapper.py +++ b/snappy_wrappers/wrappers/cbioportal/clinical_data/wrapper.py @@ -6,6 +6,10 @@ import csv import json import os +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from snakemake.script import snakemake def write_clinical_patient_tsv(donors): @@ -95,7 +99,7 @@ def write_clinical_samples_tsv(donors): """ sample_info_getters = [] - config = snakemake.config["step_config"]["cbioportal_export"] + config = snakemake.params["__config"] for step, extra_info in config["sample_info"].items(): if step == "tumor_mutational_burden": sample_info_getters.append(SampleInfoTMB(config, snakemake.wildcards, snakemake.params)) diff --git a/snappy_wrappers/wrappers/cnvetti/on_target/coverage/wrapper.py b/snappy_wrappers/wrappers/cnvetti/on_target/coverage/wrapper.py index b74378324..871b22bc4 100644 --- a/snappy_wrappers/wrappers/cnvetti/on_target/coverage/wrapper.py +++ b/snappy_wrappers/wrappers/cnvetti/on_target/coverage/wrapper.py @@ -5,6 +5,10 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.param, "args", {}) + +window_length = args.get("window_length", "") + shell( r""" set -x @@ -15,8 +19,8 @@ mkdir -p $TMPDIR/tmp.d # Define some global shortcuts -REF={snakemake.config[static_data_config][reference][path]} -REGIONS={snakemake.config[step_config][somatic_targeted_seq_cnv_calling][cnvetti_on_target][path_target_regions]} +REF={args[reference]} +REGIONS={args[path_target_regions]} # Also pipe stderr to log file if [[ -n "{snakemake.log.log}" ]]; then @@ -43,10 +47,10 @@ cnvetti cmd coverage \ -vvv \ --reference "$REF" \ - $(if [[ {snakemake.params.method_name} == cnvetti_off_target ]]; then + $(if [[ {args[method_name]} == cnvetti_off_target ]]; then echo --considered-regions GenomeWide echo --mask-piles - echo --window-length {snakemake.config[step_config][somatic_targeted_seq_cnv_calling][cnvetti_off_target][window_length]} + echo --window-length {window_length} else echo --considered-regions TargetRegions echo --targets-bed "$REGIONS" diff --git a/snappy_wrappers/wrappers/cnvetti/on_target/segment/wrapper.py b/snappy_wrappers/wrappers/cnvetti/on_target/segment/wrapper.py index f903faa14..194391a00 100644 --- a/snappy_wrappers/wrappers/cnvetti/on_target/segment/wrapper.py +++ b/snappy_wrappers/wrappers/cnvetti/on_target/segment/wrapper.py @@ -14,10 +14,6 @@ trap "rm -rf $TMPDIR" EXIT mkdir -p $TMPDIR/tmp.d -# Define some global shortcuts -REF={snakemake.config[static_data_config][reference][path]} -REGIONS={snakemake.config[step_config][somatic_targeted_seq_cnv_calling][cnvetti_on_target][path_target_regions]} - # Also pipe stderr to log file if [[ -n "{snakemake.log.log}" ]]; then if 
[[ "$(set +e; tty; set -e)" != "" ]]; then diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/coverage/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/coverage/wrapper.py index 6370b1b25..0c2128dfb 100644 --- a/snappy_wrappers/wrappers/cnvetti/wgs/coverage/wrapper.py +++ b/snappy_wrappers/wrappers/cnvetti/wgs/coverage/wrapper.py @@ -5,14 +5,7 @@ __author__ = "Manuel Holtgrewe " -# Get preset and individual settings from configuration. -cnvetti_config = snakemake.config["step_config"]["wgs_cnv_calling"]["cnvetti"] -preset_name = cnvetti_config["preset"] -preset = cnvetti_config["presets"][preset_name] - -window_length = cnvetti_config.get("window_length") or preset["window_length"] -count_kind = cnvetti_config.get("count_kind") or preset["count_kind"] -normalization = cnvetti_config.get("normalization") or preset["normalization"] +args = getattr(snakemake.params, "args", {}) shell( r""" @@ -42,20 +35,20 @@ # Compute coverage and normalize ------------------------------------------------------------------ -REF={snakemake.config[static_data_config][reference][path]} +REF={args[reference]} cnvetti cmd coverage \ -vvv \ --considered-regions GenomeWide \ - --count-kind {count_kind} \ - --window-length {window_length} \ + --count-kind {args[count_kind]} \ + --window-length {args[window_length]} \ --reference $REF \ --output $TMPDIR/cov.bcf \ --input {snakemake.input.bam} cnvetti cmd normalize \ -vvv \ - --normalization {normalization} \ + --normalization {args[normalization]} \ --input $TMPDIR/cov.bcf \ --output {snakemake.output.bcf} diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/genotype/environment.yaml b/snappy_wrappers/wrappers/cnvetti/wgs/genotype/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/genotype/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/genotype/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/genotype/wrapper.py deleted file mode 100644 index 81a849774..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/genotype/wrapper.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running CNVetti WGS genotype step.""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe " - -# Get preset and individual settings from configuration. 
-cnvetti_config = snakemake.config["step_config"]["wgs_cnv_calling"]["cnvetti"] -preset_name = cnvetti_config["preset"] -preset = cnvetti_config["presets"][preset_name] - -segmentation = cnvetti_config.get("segmentation") or preset["segmentation"] - -shell( - r""" -set -x - -# Write out information about conda installation -------------------------------------------------- - -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Setup auto-cleaned TMPDIR ----------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Perform genotyping ------------------------------------------------------------------------------ - -cnvetti cmd genotype \ - -vvv \ - --input-calls {snakemake.input.sites_bcf} \ - --input {snakemake.input.coverage_bcf} \ - --output {snakemake.output.bcf} \ - --genotyping SegmentOverlap \ - --segmentation {segmentation} - -# Compute MD5 checksums --------------------------------------------------------------------------- - -pushd $(dirname "{snakemake.output.bcf}") -md5sum $(basename "{snakemake.output.bcf}") >$(basename "{snakemake.output.bcf}").md5 -md5sum $(basename "{snakemake.output.csi}") >$(basename "{snakemake.output.csi}").md5 -""" -) diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/environment.yaml b/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/wrapper.py deleted file mode 100644 index 5bce0e074..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/merge_genotypes/wrapper.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running CNVetti WGS merge_genotypes step.""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe " - -shell( - r""" -set -x - -# Write out information about conda installation -------------------------------------------------- - -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Setup auto-cleaned TMPDIR ----------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Merge genotypes across cohort ------------------------------------------------------------------- - -bcftools merge \ - -m id \ - -O z \ - -o {snakemake.output.vcf} \ - 
{snakemake.input.vcf} - -tabix -f {snakemake.output.vcf} - -# Compute MD5 checksums --------------------------------------------------------------------------- - -pushd $(dirname "{snakemake.output.vcf}") -md5sum $(basename "{snakemake.output.vcf}") >$(basename "{snakemake.output.vcf}").md5 -md5sum $(basename "{snakemake.output.vcf_tbi}") >$(basename "{snakemake.output.vcf_tbi}").md5 -""" -) diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/environment.yaml b/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/wrapper.py deleted file mode 100644 index 04e43d711..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/merge_segments/wrapper.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running CNVetti WGS merge_segments step.""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe " - -shell( - r""" -set -x - -# Write out information about conda installation -------------------------------------------------- - -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Setup auto-cleaned TMPDIR ----------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Merge segmentation results ---------------------------------------------------------------------- - -cnvetti cmd merge-seg \ - -vvv \ - --output {snakemake.output.bcf} \ - {snakemake.input.bcf} - -# Compute MD5 checksums --------------------------------------------------------------------------- - -pushd $(dirname "{snakemake.output.bcf}") -md5sum $(basename "{snakemake.output.bcf}") >$(basename "{snakemake.output.bcf}").md5 -md5sum $(basename "{snakemake.output.csi}") >$(basename "{snakemake.output.csi}").md5 -""" -) diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/environment.yaml b/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/wrapper.py deleted file mode 100644 index 1b9702ed2..000000000 --- a/snappy_wrappers/wrappers/cnvetti/wgs/reorder_vcf/wrapper.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running CNVetti WGS reorder_vcf step.""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe " - -shell( - r""" -set -x - -# Write out information about conda installation -------------------------------------------------- - -conda list >{snakemake.log.conda_list} -conda info 
>{snakemake.log.conda_info} - -# Also pipe stderr to log file -------------------------------------------------------------------- - -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -# Setup auto-cleaned TMPDIR ----------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Extract per-pedigree calls ---------------------------------------------------------------------- - -echo '{snakemake.params.ped_members}' \ -| tr ' ' '\n' \ -> $TMPDIR/samples.txt - -bcftools view \ - --samples-file $TMPDIR/samples.txt \ - --output-type u \ - {snakemake.input.bcf} \ -| bcftools view \ - --output-file {snakemake.output.vcf} \ - --output-type z \ - --include '(GT !~ "\.") && (GT ~ "1")' - -tabix -f {snakemake.output.vcf} - -# Compute MD5 checksums --------------------------------------------------------------------------- - -pushd $(dirname "{snakemake.output.vcf}") -md5sum $(basename "{snakemake.output.vcf}") >$(basename "{snakemake.output.vcf}").md5 -md5sum $(basename "{snakemake.output.vcf_tbi}") >$(basename "{snakemake.output.vcf_tbi}").md5 -""" -) diff --git a/snappy_wrappers/wrappers/cnvetti/wgs/segment/wrapper.py b/snappy_wrappers/wrappers/cnvetti/wgs/segment/wrapper.py index abdbefc85..b1becc2ca 100644 --- a/snappy_wrappers/wrappers/cnvetti/wgs/segment/wrapper.py +++ b/snappy_wrappers/wrappers/cnvetti/wgs/segment/wrapper.py @@ -5,12 +5,7 @@ __author__ = "Manuel Holtgrewe " -# Get preset and individual settings from configuration. 
-cnvetti_config = snakemake.config["step_config"]["wgs_cnv_calling"]["cnvetti"] -preset_name = cnvetti_config["preset"] -preset = cnvetti_config["presets"][preset_name] - -segmentation = cnvetti_config.get("segmentation") or preset["segmentation"] +args = getattr(snakemake.params, "args", {}) shell( r""" @@ -42,7 +37,7 @@ cnvetti cmd segment \ -vvv \ - --segmentation {segmentation} \ + --segmentation {args[segmentation]} \ --input {snakemake.input.bcf} \ --output {snakemake.output.windows_bcf} \ --output-segments {snakemake.output.segments_bcf} diff --git a/snappy_wrappers/wrappers/cnvkit/wgs/environment.yaml b/snappy_wrappers/wrappers/cnvkit/wgs/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/cnvkit/wgs/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/cnvkit/wgs/wrapper.py b/snappy_wrappers/wrappers/cnvkit/wgs/wrapper.py deleted file mode 100644 index 183369e02..000000000 --- a/snappy_wrappers/wrappers/cnvkit/wgs/wrapper.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper vor cnvkit.py batch --method wgs""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Also pipe stderr to log file -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} >{snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} >{snakemake.log.conda_info_md5} - -# Run cvnkit.py - -cnvkit.py batch \ - {snakemake.input.tumor_bam} \ - -n {snakemake.input.normal_bam} \ - --method wgs \ - --annotate {snakemake.config[step_config][somatic_wgs_cnv_calling][cnvkit][path_annotate_refflat]} \ - -f {snakemake.config[static_data_config][reference][path]} \ - --output-dir $(dirname {snakemake.output.segment}) - -pushd $(dirname {snakemake.output.segment}) -file=$(basename {snakemake.output.segment}) -mapper=$(echo $file | cut -f1 -d".") -for i in $mapper.*; do ln -sr -T $i $(echo $i | sed "s/$mapper/$mapper\.cnvkit/") ; done -popd - -unset DISPLAY - -cnvkit.py scatter \ - {snakemake.output.bins} \ - -o {snakemake.output.scatter} \ - -s {snakemake.output.segment} - -pushd $(dirname {snakemake.output.segment}) -md5sum $(basename {snakemake.output.segment}) > $(basename {snakemake.output.segment}).md5 -md5sum $(basename {snakemake.output.bins}) > $(basename {snakemake.output.bins}).md5 -md5sum $(basename {snakemake.output.scatter}) > $(basename {snakemake.output.scatter}).md5 -popd -""" -) diff --git a/snappy_wrappers/wrappers/control_freec/transform/wrapper.py b/snappy_wrappers/wrappers/control_freec/transform/wrapper.py index 63d51e2cf..8867d8c44 100644 --- a/snappy_wrappers/wrappers/control_freec/transform/wrapper.py +++ b/snappy_wrappers/wrappers/control_freec/transform/wrapper.py @@ -7,7 +7,8 @@ shell.executable("/bin/bash") -config = snakemake.config["step_config"]["somatic_wgs_cnv_calling"]["control_freec"]["convert"] +args = getattr(snakemake.params, "args", {}) + rscript = os.path.join( 
os.path.dirname(os.path.realpath(__file__)), "snappy-convert-control_freec.R" ) @@ -47,9 +48,9 @@ segments_fn = \"{snakemake.output.segments}\", \ cns_fn = \"{snakemake.output.cns}\", \ cnr_fn = \"{snakemake.output.cnr}\", \ - org_obj={config[org_obj]}, \ - tx_obj={config[tx_obj]}, \ - bs_obj={config[bs_obj]})" + org_obj={args[org_obj]}, \ + tx_obj={args[tx_obj]}, \ + bs_obj={args[bs_obj]})" for f in {snakemake.output.log2} {snakemake.output.call} {snakemake.output.segments} \ {snakemake.output.cns} {snakemake.output.cnr}; do diff --git a/snappy_wrappers/wrappers/control_freec/wrapper.py b/snappy_wrappers/wrappers/control_freec/wrapper.py index f20ccad1f..c0550becf 100644 --- a/snappy_wrappers/wrappers/control_freec/wrapper.py +++ b/snappy_wrappers/wrappers/control_freec/wrapper.py @@ -5,8 +5,10 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + window_str = "" -w = snakemake.config["step_config"]["somatic_wgs_cnv_calling"]["control_freec"]["window_size"] +w = args.get("window_size") if w >= 0: window_str = "window = {}".format(w) @@ -54,7 +56,7 @@ ## path to sambamba (faster BAM file reading) sambamba = sambamba -chrLenFile = {snakemake.config[step_config][somatic_wgs_cnv_calling][control_freec][path_chrlenfile]} +chrLenFile = {args[path_chrlenfile]} ploidy = 2 breakPointThreshold = .8 @@ -68,8 +70,8 @@ numberOfProcesses = 4 -$(if [[ "{snakemake.config[step_config][somatic_wgs_cnv_calling][control_freec][path_mappability_enabled]}" == True ]]; then - echo gemMappabilityFile = {snakemake.config[step_config][somatic_wgs_cnv_calling][control_freec][path_mappability]}; +$(if [[ "{args[path_mappability_enabled]}" == True ]]; then + echo gemMappabilityFile = {args[path_mappability]}; fi) uniqueMatch = TRUE diff --git a/snappy_wrappers/wrappers/defuse/wrapper.py b/snappy_wrappers/wrappers/defuse/wrapper.py index dd8030fa0..7ce79ed82 100644 --- a/snappy_wrappers/wrappers/defuse/wrapper.py +++ b/snappy_wrappers/wrappers/defuse/wrapper.py @@ -41,7 +41,7 @@ pushd $workdir defuse_run.pl \ - -d {snakemake.config[step_config][somatic_gene_fusion_calling][defuse][path_dataset_directory]} \ + -d {snakemake.params.args[path_dataset_directory]} \ -1 input/reads_1.fastq.gz \ -2 input/reads_2.fastq.gz \ -o output \ diff --git a/snappy_wrappers/wrappers/delly2/germline/call/wrapper.py b/snappy_wrappers/wrappers/delly2/germline/call/wrapper.py index 37d9b3c11..0a592ad27 100644 --- a/snappy_wrappers/wrappers/delly2/germline/call/wrapper.py +++ b/snappy_wrappers/wrappers/delly2/germline/call/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -delly2_config = snakemake.config["step_config"][snakemake.params.step_key]["delly2"] +delly2_config = snakemake.params.args["config"] if delly2_config["path_exclude_tsv"]: exclude_str = "--exclude %s" % delly2_config["path_exclude_tsv"] @@ -22,6 +22,7 @@ } """ + shell( r""" set -x @@ -64,7 +65,7 @@ --qual-tra {delly2_config[qual_tra]} \ --geno-qual {delly2_config[geno_qual]} \ --mad-cutoff {delly2_config[mad_cutoff]} \ - --genome {snakemake.config[static_data_config][reference][path]} \ + --genome {snakemake.params.args[genome]} \ --outfile {snakemake.output.bcf} \ {exclude_str} \ {snakemake.input.bam} diff --git a/snappy_wrappers/wrappers/delly2/germline/genotype/wrapper.py b/snappy_wrappers/wrappers/delly2/germline/genotype/wrapper.py index 6e73bfb52..4c288bd4e 100644 --- a/snappy_wrappers/wrappers/delly2/germline/genotype/wrapper.py +++ b/snappy_wrappers/wrappers/delly2/germline/genotype/wrapper.py @@ -2,7 +2,7 @@ __author__ = 
"Manuel Holtgrewe " -delly2_config = snakemake.config["step_config"][snakemake.params.step_key]["delly2"] +delly2_config = snakemake.params.args["config"] if delly2_config["path_exclude_tsv"]: exclude_str = "--exclude %s" % delly2_config["path_exclude_tsv"] @@ -66,7 +66,7 @@ --geno-qual {delly2_config[geno_qual]} \ --mad-cutoff {delly2_config[mad_cutoff]} \ --vcffile {snakemake.input.bcf} \ - --genome {snakemake.config[static_data_config][reference][path]} \ + --genome {snakemake.params.args[genome]} \ --outfile {snakemake.output.bcf} \ {exclude_str} \ {snakemake.input.bam} diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/call/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/call/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/call/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/call/wrapper.py b/snappy_wrappers/wrappers/delly2/germline_cnv/call/wrapper.py deleted file mode 100644 index e8148830d..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/call/wrapper.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's CNV call step""" - -from snakemake.shell import shell - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log.log}") -set -x -# ----------------------------------------------------------------------------- - -# Write out information about conda installation -conda list > {snakemake.log.conda_list} -conda info > {snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} - -# Call CNV -delly cnv \ - --mappability {snakemake.config[step_config][wgs_cnv_calling][delly2][mappability]} \ - --genome {snakemake.config[static_data_config][reference][path]} \ - --outfile {snakemake.output.bcf} \ - {snakemake.input.bam} - -tabix -f {snakemake.output.bcf} - -pushd $(dirname {snakemake.output.bcf}) -md5sum $(basename {snakemake.output.bcf}) > $(basename {snakemake.output.bcf_md5}) -md5sum $(basename {snakemake.output.csi}) > $(basename {snakemake.output.csi_md5}) -""" -) - -# Compute MD5 sums of logs -shell( - r""" -md5sum {snakemake.log.log} > {snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/environment.yaml deleted file mode 100644 index 4e29cb739..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bcftools==1.19 - - htslib==1.19.1 - - delly==1.1.3 diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/filter/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/filter/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/filter/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/filter/wrapper.py b/snappy_wrappers/wrappers/delly2/germline_cnv/filter/wrapper.py deleted file mode 100644 index ee203b79b..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/filter/wrapper.py +++ 
/dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's CNV filter step""" - -from snakemake.shell import shell - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log.log}") -set -x -# ----------------------------------------------------------------------------- - -# Write out information about conda installation -conda list > {snakemake.log.conda_list} -conda info > {snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} - - -delly classify \ - --filter germline \ - --outfile {snakemake.output.bcf} \ - {snakemake.input.bcf} - -tabix -f {snakemake.output.bcf} - -pushd $(dirname {snakemake.output.bcf}) -md5sum $(basename {snakemake.output.bcf}) > $(basename {snakemake.output.bcf_md5}) -md5sum $(basename {snakemake.output.csi}) > $(basename {snakemake.output.csi_md5}) -""" -) - -# Compute MD5 sums of logs -shell( - r""" -md5sum {snakemake.log.log} > {snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/wrapper.py b/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/wrapper.py deleted file mode 100644 index 1b7fa2aa3..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/genotype/wrapper.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's CNV re-genotyping step""" - -from snakemake.shell import shell - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log.log}") -set -x -# ----------------------------------------------------------------------------- - -# Write out information about conda installation -conda list > {snakemake.log.conda_list} -conda info > {snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} - - -delly cnv --segmentation \ - --mappability {snakemake.config[step_config][wgs_cnv_calling][delly2][mappability]} \ - --genome {snakemake.config[static_data_config][reference][path]} \ - --vcffile {snakemake.input.bcf} \ - --outfile {snakemake.output.bcf} \ - {snakemake.input.bam} - -tabix -f {snakemake.output.bcf} - -pushd $(dirname {snakemake.output.bcf}) -md5sum $(basename {snakemake.output.bcf}) > $(basename {snakemake.output.bcf_md5}) -md5sum $(basename {snakemake.output.csi}) > $(basename {snakemake.output.csi_md5}) -""" -) - -# Compute MD5 sums of logs -shell( - r""" -md5sum {snakemake.log.log} > {snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at 
end of file diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/wrapper.py b/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/wrapper.py deleted file mode 100644 index 18d3a5686..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_calls/wrapper.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's CNV call merging step""" - -import tempfile - -from snakemake.shell import shell - -with tempfile.NamedTemporaryFile("wt") as tmpf: - # Write paths to input files into temporary file. - # - # cf. https://bitbucket.org/snakemake/snakemake/issues/878 - print("\n".join(snakemake.input), file=tmpf) - tmpf.flush() - - # Actually run the script. - shell( - r""" - # ----------------------------------------------------------------------------- - # Redirect stderr to log file by default and enable printing executed commands - exec &> >(tee -a "{snakemake.log.log}") - set -x - # ----------------------------------------------------------------------------- - - # Write out information about conda installation - conda list > {snakemake.log.conda_list} - conda info > {snakemake.log.conda_info} - md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} - md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} - - export LC_ALL=C - export TMPDIR=$(mktemp -d) - trap "rm -rf $TMPDIR" EXIT - - mkdir $TMPDIR/cwd - - i=0 - for x in $(cat {tmpf.name}); do - let "i=$i+1" - ln -s $(readlink -f $x) $TMPDIR/cwd/$i.bcf - ln -s $(readlink -f $x).csi $TMPDIR/cwd/$i.bcf.csi - done - - out=$(realpath {snakemake.output.bcf}) - pushd $TMPDIR/cwd - delly merge --cnvmode --pass \ - --minsize {snakemake.config[step_config][wgs_cnv_calling][delly2][minsize]} \ - --maxsize {snakemake.config[step_config][wgs_cnv_calling][delly2][maxsize]} \ - --outfile $out \ - *.bcf - popd - tabix -f {snakemake.output.bcf} - - pushd $(dirname {snakemake.output.bcf}) - md5sum $(basename {snakemake.output.bcf}) > $(basename {snakemake.output.bcf_md5}) - md5sum $(basename {snakemake.output.csi}) > $(basename {snakemake.output.csi_md5}) - """ - ) - -# Compute MD5 sums of logs -shell( - r""" -md5sum {snakemake.log.log} > {snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/environment.yaml b/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/wrapper.py b/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/wrapper.py deleted file mode 100644 index 1e13768c3..000000000 --- a/snappy_wrappers/wrappers/delly2/germline_cnv/merge_genotypes/wrapper.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's calls tep""" - -import tempfile - -from snakemake.shell import shell - -with tempfile.NamedTemporaryFile("wt") as tmpf: - # Write paths to input files into temporary file. - # - # cf. https://bitbucket.org/snakemake/snakemake/issues/878 - print("\n".join(snakemake.input), file=tmpf) - tmpf.flush() - # Actually run the script. 
- shell( - r""" - # ----------------------------------------------------------------------------- - # Redirect stderr to log file by default and enable printing executed commands - exec &> >(tee -a "{snakemake.log.log}") - set -x - # ----------------------------------------------------------------------------- - - # Write out information about conda installation - conda list > {snakemake.log.conda_list} - conda info > {snakemake.log.conda_info} - md5sum {snakemake.log.conda_list} > {snakemake.log.conda_list_md5} - md5sum {snakemake.log.conda_info} > {snakemake.log.conda_info_md5} - - export TMPDIR=$(mktemp -d) - trap "rm -rf $TMPDIR" EXIT - - mkdir $TMPDIR/cwd - - i=0 - for x in $(cat {tmpf.name}); do - let "i=$i+1" - ln -s $(readlink -f $x) $TMPDIR/cwd/$i.bcf - ln -s $(readlink -f $x).csi $TMPDIR/cwd/$i.bcf.csi - done - - # --------------- - # Merge genotypes - # --------------- - # If a single sample, there is no need to merge. - # ``$i`` is reused from previous BCFs for-loop. - if [[ $i -eq 1 ]]; then - cp $TMPDIR/cwd/1.bcf {snakemake.output.bcf} - cp $TMPDIR/cwd/1.bcf.csi {snakemake.output.csi} - else - out=$(realpath {snakemake.output.bcf}) - pushd $TMPDIR/cwd - bcftools merge \ - -m id \ - -O b \ - -o $out \ - *.bcf - popd - tabix -f {snakemake.output.bcf} - fi - - pushd $(dirname {snakemake.output.bcf}) - md5sum $(basename {snakemake.output.bcf}) > $(basename {snakemake.output.bcf_md5}) - md5sum $(basename {snakemake.output.csi}) > $(basename {snakemake.output.csi_md5}) - """ - ) - - # Compute MD5 sums of logs - shell( - r""" - md5sum {snakemake.log.log} > {snakemake.log.log_md5} - """ - ) diff --git a/snappy_wrappers/wrappers/delly2/somatic/call/environment.yaml b/snappy_wrappers/wrappers/delly2/somatic/call/environment.yaml deleted file mode 120000 index 788259c04..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/call/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../../germline/environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/somatic/call/wrapper.py b/snappy_wrappers/wrappers/delly2/somatic/call/wrapper.py deleted file mode 100644 index c7adbc9ca..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/call/wrapper.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's call step on tumor/matched-normal pairs""" - -from snakemake.shell import shell - -__author__ = "Nina Thiessen" -__email__ = "nina.thiessen@bih-charite.de" - -exclude_str = "" -s = snakemake.config["step_config"]["somatic_wgs_sv_calling"]["delly2"]["path_exclude_tsv"] -if s is not None: - exclude_str = "--exclude {}".format(s) - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -delly call \ - {exclude_str} \ - --map-qual 1 \ - --qual-tra 20 \ - --genome {snakemake.config[static_data_config][reference][path]} \ - --outfile {snakemake.output.bcf} \ - {snakemake.input.tumor_bam} \ - {snakemake.input.normal_bam} - -tabix -f {snakemake.output.bcf} - -pushd $(dirname {snakemake.output.bcf}) -md5sum $(basename {snakemake.output.bcf}) >$(basename {snakemake.output.bcf}).md5 -md5sum $(basename {snakemake.output.bcf}).csi >$(basename {snakemake.output.bcf}).csi.md5 -""" -) diff --git a/snappy_wrappers/wrappers/delly2/somatic/filter/environment.yaml 
b/snappy_wrappers/wrappers/delly2/somatic/filter/environment.yaml deleted file mode 120000 index 788259c04..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/filter/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../../germline/environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/somatic/filter/wrapper.py b/snappy_wrappers/wrappers/delly2/somatic/filter/wrapper.py deleted file mode 100644 index c5227551f..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/filter/wrapper.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's "pre-filter" and "post-filter" steps""" - -from snakemake.shell import shell - -__author__ = "Nina Thiessen" -__email__ = "nina.thiessen@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -echo '{snakemake.params.membership}' \ -| tr ' ' '\n' \ ->$TMPDIR/samples.tsv - -delly filter \ - --filter somatic \ - --samples $TMPDIR/samples.tsv \ - --outfile {snakemake.output.bcf} \ - {snakemake.input.bcf} - -tabix -f {snakemake.output.bcf} - -pushd $(dirname {snakemake.output.bcf}) -md5sum $(basename {snakemake.output.bcf}) >$(basename {snakemake.output.bcf}).md5 -md5sum $(basename {snakemake.output.bcf}).csi >$(basename {snakemake.output.bcf}).csi.md5 -""" -) diff --git a/snappy_wrappers/wrappers/delly2/somatic/final_vcf/environment.yaml b/snappy_wrappers/wrappers/delly2/somatic/final_vcf/environment.yaml deleted file mode 120000 index 788259c04..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/final_vcf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../../germline/environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/delly2/somatic/final_vcf/wrapper.py b/snappy_wrappers/wrappers/delly2/somatic/final_vcf/wrapper.py deleted file mode 100644 index dc005e772..000000000 --- a/snappy_wrappers/wrappers/delly2/somatic/final_vcf/wrapper.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Delly2's "final vcf" step""" - -from snakemake.shell import shell - -__author__ = "Nina Thiessen" -__email__ = "nina.thiessen@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec &> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -bcftools view - -O z - --outfile {snakemake.output.vcf} \ - {snakemake.input.bcf} - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf}).tbi >$(basename {snakemake.output.vcf}).tbi.md5 -""" -) diff --git a/snappy_wrappers/wrappers/dkfz_bias_filter/environment.yaml b/snappy_wrappers/wrappers/dkfz_bias_filter/environment.yaml index 349838bf6..7fae65fc5 100644 --- a/snappy_wrappers/wrappers/dkfz_bias_filter/environment.yaml +++ b/snappy_wrappers/wrappers/dkfz_bias_filter/environment.yaml @@ -3,7 +3,6 @@ channels: - bioconda - nodefaults dependencies: - - htslib ==1.3.1 - - dkfz-bias-filter ==1.2.3a - # bcftools incompatible with dkfz-bias-filter in 
bioconda (2023-10-13) - # - bcftools +- htslib =1.17 +- dkfz-bias-filter ==1.2.3a +- bcftools =1.17 diff --git a/snappy_wrappers/wrappers/dkfz_bias_filter/wrapper.py b/snappy_wrappers/wrappers/dkfz_bias_filter/wrapper.py index 20dca9ef4..b97f05117 100644 --- a/snappy_wrappers/wrappers/dkfz_bias_filter/wrapper.py +++ b/snappy_wrappers/wrappers/dkfz_bias_filter/wrapper.py @@ -40,13 +40,11 @@ --writeQC \ {snakemake.input.vcf} \ {snakemake.input.bam} \ - {snakemake.config[static_data_config][reference][path]} \ + {snakemake.params.args[reference]} \ ${{out%.gz}} -# bcftools incompatible with dkfzbiasfilter.py in bioconda (2023-10-13) if [[ ! -s ${{out%.gz}} ]]; then - zgrep '^#' {snakemake.input.vcf} \ - # bcftools view --header-only {snakemake.input.vcf} \ + bcftools view --header-only {snakemake.input.vcf} \ > ${{out%.gz}} fi diff --git a/snappy_wrappers/wrappers/dupradar/environment.yaml b/snappy_wrappers/wrappers/dupradar/environment.yaml deleted file mode 100644 index b41bebd97..000000000 --- a/snappy_wrappers/wrappers/dupradar/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - r-base==4.3 - - bioconductor-dupradar==1.32.0 - - samtools==1.19.2 diff --git a/snappy_wrappers/wrappers/dupradar/wrapper.py b/snappy_wrappers/wrappers/dupradar/wrapper.py deleted file mode 100644 index 9593691ab..000000000 --- a/snappy_wrappers/wrappers/dupradar/wrapper.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running CopywriteR""" - -from snakemake import shell - -__author__ = "Clemens Messerschmidt " - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -# ------------------------------------------------------------------------------------------------- -# Write helper script and call R -# -cat << __EOF > $TMPDIR/run_dupradar.R -library(dupRadar) - -args = commandArgs(trailingOnly=TRUE) - -file = args[1] -out = args[2] -gtf = args[3] -stranded = as.integer(args[4]) -paired = as.logical(args[5]) -threads = as.integer(args[6]) -outdir = args[7] -sample = basename(dirname(outdir)) - -dm = analyzeDuprates(file, gtf, stranded, paired, threads, tmpDir="$TMPDIR", autosort=FALSE) - -bitmap("{snakemake.output.densplot}", type='png16m', height=1000, width=1000, unit='px', taa=4, gaa=4) -duprateExpDensPlot(DupMat=dm, main=sample) -dev.off() - -bitmap("{snakemake.output.boxplot}", type='png16m', height=1000, width=1000, unit='px', taa=4, gaa=4) -par(mar=c(10, 4, 4, 2) + 0.1) -duprateExpBoxplot(DupMat=dm, main=sample) -dev.off() - -bitmap("{snakemake.output.plot}", type='png16m', height=1000, width=1000, unit='px', taa=4, gaa=4) -duprateExpPlot(DupMat=dm, main=sample) -dev.off() - -bitmap("{snakemake.output.hist}", type='png16m', height=1000, width=1000, unit='px', taa=4, gaa=4) -expressionHist(dm) -dev.off() - -write.table(dm, file=out, sep="\t", col.names=TRUE, row.names=FALSE, quote=FALSE) -__EOF - -out_dir=$(readlink -f $(dirname {snakemake.output.tsv})) - -# Find if the bam file contains paired reads -n_pair=$(samtools view -f 0x1 {snakemake.input.bam} | head -n 1000 | wc -l || true) -if [[ $n_pair -eq 
0 ]]; then - paired="FALSE" -else - paired="TRUE" -fi - -Rscript --vanilla $TMPDIR/run_dupradar.R \ - "{snakemake.input.bam}" \ - "{snakemake.output.tsv}" \ - {snakemake.config[step_config][gene_expression_quantification][dupradar][path_annotation_gtf]} \ - {snakemake.config[step_config][gene_expression_quantification][dupradar][strandedness]} \ - $paired \ - {snakemake.config[step_config][gene_expression_quantification][dupradar][num_threads]} \ - $out_dir - -for f in $(ls $out_dir); do - md5sum $out_dir/$f > $out_dir/$f.md5 -done -""" -) diff --git a/snappy_wrappers/wrappers/eb_filter/wrapper.py b/snappy_wrappers/wrappers/eb_filter/wrapper.py index ad1a57b94..d74a7e8c3 100644 --- a/snappy_wrappers/wrappers/eb_filter/wrapper.py +++ b/snappy_wrappers/wrappers/eb_filter/wrapper.py @@ -27,7 +27,7 @@ export TMPDIR=$(mktemp -d) trap "rm -rf $TMPDIR" EXIT -export REF={snakemake.config[static_data_config][reference][path]} +export REF={snakemake.params.args[reference]} # Also pipe stderr to log file if [[ -n "{snakemake.log.log}" ]]; then diff --git a/snappy_wrappers/wrappers/expansionhunter/wrapper.py b/snappy_wrappers/wrappers/expansionhunter/wrapper.py index 9ae21deb4..8008342b7 100644 --- a/snappy_wrappers/wrappers/expansionhunter/wrapper.py +++ b/snappy_wrappers/wrappers/expansionhunter/wrapper.py @@ -45,9 +45,9 @@ mkdir -p $(dirname {snakemake.output.json}) # Call tool -ExpansionHunter --reads {snakemake.input} \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --variant-catalog {snakemake.config[step_config][repeat_expansion][repeat_catalog]} \ +ExpansionHunter --reads {snakemake.input.bam} \ + --reference {snakemake.input.reference} \ + --variant-catalog {snakemake.input.repeat_catalog} \ --output-prefix {prefix} {sex_argument} """ ) diff --git a/snappy_wrappers/wrappers/fastp/run/wrapper.py b/snappy_wrappers/wrappers/fastp/run/wrapper.py index e65af5261..a005bef7e 100644 --- a/snappy_wrappers/wrappers/fastp/run/wrapper.py +++ b/snappy_wrappers/wrappers/fastp/run/wrapper.py @@ -39,8 +39,7 @@ this_file = __file__ -this_step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][this_step]["fastp"] +config = snakemake.params.args["config"] shell( r""" diff --git a/snappy_wrappers/wrappers/featurecounts/wrapper.py b/snappy_wrappers/wrappers/featurecounts/wrapper.py index 2b66d0053..3df642193 100644 --- a/snappy_wrappers/wrappers/featurecounts/wrapper.py +++ b/snappy_wrappers/wrappers/featurecounts/wrapper.py @@ -7,6 +7,8 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) + shell( r""" set -euo pipefail @@ -27,7 +29,7 @@ fi fi -strand={snakemake.config[step_config][gene_expression_quantification][strand]} +strand={args[strand]} if [ ${{strand}} -eq -1 ] then @@ -45,7 +47,7 @@ -T 2 \ -g gene_id \ -t exon \ - -a {snakemake.config[step_config][gene_expression_quantification][featurecounts][path_annotation_gtf]} \ + -a {args[path_annotation_gtf]} \ -s ${{strand}} \ -p \ --verbose \ diff --git a/snappy_wrappers/wrappers/flag_oxog_artifacts/environment.yaml b/snappy_wrappers/wrappers/flag_oxog_artifacts/environment.yaml deleted file mode 100644 index 57cafdb25..000000000 --- a/snappy_wrappers/wrappers/flag_oxog_artifacts/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bedtools==2.31.1 - - bcftools==1.19 - - htslib==1.19.1 diff --git a/snappy_wrappers/wrappers/flag_oxog_artifacts/wrapper.py
b/snappy_wrappers/wrappers/flag_oxog_artifacts/wrapper.py deleted file mode 100644 index 29a0e1fbc..000000000 --- a/snappy_wrappers/wrappers/flag_oxog_artifacts/wrapper.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for additional flagging. - -TODO: rename to reflect that mroe is done than just flagging the oxog artifacts. -""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -# Extract genotype calls of the somatic DNA library and pipe into the -# actual filtration command. We simply build a BED file which we then use -# below for flagging. -bcftools view \ - -s "{snakemake.wildcards.tumor_library}" \ - -O u \ - -i ' - (AF[0] <= 0.08 || AD[1] < 5) && - ((REF == "G" && ALT ~ "T") || (REF = "C" && ALT ~ "A"))' \ - {snakemake.input.vcf} \ -| bcftools query \ - -i 'GT ~ "1"' \ - -f "%CHROM\t%POS0\t%END\t1\n" \ -| bgzip -c \ -> $TMPDIR/oxog_positions.bed.gz -tabix -f $TMPDIR/oxog_positions.bed.gz - -# Create BED file with maximal alternate allele depth of normal sample. -bcftools query -f "%CHROM\t%POS0\t%END\t[%AD\t]\n" {snakemake.input.vcf} \ -| awk -F $'\t' ' - BEGIN {{ - OFS = FS; - }} - {{ - split($5, arr, ","); - n = 0; - for (i = 2; i <= length(arr); ++i) {{ - if (arr[i] > n) {{ - n = arr[i]; - }} - }} - print $1, $2, $3, n; -}}' \ -| bgzip -c \ -> $TMPDIR/alt_mdaa.bed.gz -tabix -f $TMPDIR/alt_mdaa.bed.gz - -# Build header lines to add -cat <<"EOF" >$TMPDIR/header.txt -##FORMAT= -##INFO= -EOF - -# Flag the input VCF file with the BED files we built earlier. -bcftools annotate \ - -s "{snakemake.wildcards.tumor_library}" \ - --header-lines $TMPDIR/header.txt \ - --annotations $TMPDIR/oxog_positions.bed.gz \ - --columns "-CHROM,-FROM,-TO,FMT/CUBI_OXOG" \ - -O u \ - {snakemake.input.vcf} \ -| bcftools annotate \ - --annotations $TMPDIR/alt_mdaa.bed.gz \ - --columns "-CHROM,-FROM,-TO,CUBI_NMDAA" \ - -O z \ - -o {snakemake.output.vcf} -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) && \ - md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 && \ - md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 && \ - popd -""" -) diff --git a/snappy_wrappers/wrappers/gatk3_hc/wrapper.py b/snappy_wrappers/wrappers/gatk3_hc/wrapper.py index 851c8b276..f775cb411 100644 --- a/snappy_wrappers/wrappers/gatk3_hc/wrapper.py +++ b/snappy_wrappers/wrappers/gatk3_hc/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size. 
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk3_hc][window_length]} \ + --reference {args[reference]} \ + --bin-length {args[window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -93,13 +95,13 @@ GATK_JAVA_MEMORY=3750m gatk3 -Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR \ --analysis_type HaplotypeCaller \ - $(if [[ {snakemake.config[step_config][variant_calling][gatk3_hc][allow_seq_dict_incompatibility]} == "True" ]]; then \ + $(if [[ {args[allow_seq_dict_incompatibility]} == "True" ]]; then \ echo --disable-sequence-dictionary-validation true; \ fi) \ --out $TMPDIR/shards-output/$(printf %06d $job_no).vcf.gz \ - --reference_sequence {snakemake.config[static_data_config][reference][path]} \ + --reference_sequence {args[reference]} \ --sample_ploidy 2 \ - --dbsnp {snakemake.config[static_data_config][dbsnp][path]} \ + --dbsnp {args[dbsnp]} \ --intervals $interval \ $(for path in {snakemake.input.bam}; do \ echo --input_file $path; \ @@ -109,7 +111,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk3_hc][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -125,7 +127,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.vcf} tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/gatk3_ug/wrapper.py b/snappy_wrappers/wrappers/gatk3_ug/wrapper.py index 44e69d995..08a821ec0 100644 --- a/snappy_wrappers/wrappers/gatk3_ug/wrapper.py +++ b/snappy_wrappers/wrappers/gatk3_ug/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size.
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_joint][window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -94,15 +96,15 @@ gatk3 -Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR \ --analysis_type UnifiedGenotyper \ --out $TMPDIR/shards-output/$(printf %06d $job_no).vcf.gz \ - --reference_sequence {snakemake.config[static_data_config][reference][path]} \ + --reference_sequence {args[reference]} \ --sample_ploidy 2 \ - --dbsnp {snakemake.config[static_data_config][dbsnp][path]} \ - --downsample_to_coverage {snakemake.config[step_config][variant_calling][gatk3_ug][downsample_to_coverage]} \ + --dbsnp {args[dbsnp]} \ + --downsample_to_coverage {args[downsample_to_coverage]} \ --intervals $interval \ $(for path in {snakemake.input.bam}; do \ echo --input_file $path; \ done) \ - $(if [[ {snakemake.config[step_config][variant_calling][gatk3_ug][allow_seq_dict_incompatibility]} == "True" ]]; then \ + $(if [[ {args[allow_seq_dict_incompatibility]} == "True" ]]; then \ echo --disable-sequence-dictionary-validation true; \ fi) }} @@ -110,7 +112,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk3_ug][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -126,7 +128,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.vcf} tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/gatk4_hc/combine_gvcfs/wrapper.py b/snappy_wrappers/wrappers/gatk4_hc/combine_gvcfs/wrapper.py index bd988fce3..348c97fe2 100644 --- a/snappy_wrappers/wrappers/gatk4_hc/combine_gvcfs/wrapper.py +++ b/snappy_wrappers/wrappers/gatk4_hc/combine_gvcfs/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size.
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][window_length]} \ + --reference {args[reference]} \ + --bin-length {args[window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -95,9 +97,9 @@ CombineGVCFs \ --java-options "-Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR" \ --tmp-dir $TMPDIR \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --output $TMPDIR/shards-output/$(printf %06d $job_no).g.vcf.gz \ - --break-bands-at-multiples-of {snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][window_length]} \ + --break-bands-at-multiples-of {args[window_length]} \ --intervals $interval \ -G StandardAnnotation \ -G AS_StandardAnnotation \ @@ -111,7 +113,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -128,7 +130,7 @@ | bcftools norm \ -d exact \ -c ws \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.gvcf} tabix {snakemake.output.gvcf} diff --git a/snappy_wrappers/wrappers/gatk4_hc/discover/wrapper.py b/snappy_wrappers/wrappers/gatk4_hc/discover/wrapper.py index 55093b145..60a519f53 100644 --- a/snappy_wrappers/wrappers/gatk4_hc/discover/wrapper.py +++ b/snappy_wrappers/wrappers/gatk4_hc/discover/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size. 
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][window_length]} \ + --reference {args[reference]} \ + --bin-length {args[window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -97,15 +99,15 @@ --java-options "-Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR" \ --tmp-dir $TMPDIR \ --output $TMPDIR/shards-output/$(printf %06d $job_no).g.vcf.gz \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --dbsnp {snakemake.config[static_data_config][dbsnp][path]} \ + --reference {args[reference]} \ + --dbsnp {args[dbsnp]} \ --intervals $interval \ --input {snakemake.input.bam} \ -G StandardAnnotation \ -G AS_StandardAnnotation \ -G StandardHCAnnotation \ -G AlleleSpecificAnnotation \ - $(if [[ {snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][allow_seq_dict_incompatibility]} == "True" ]]; then \ + $(if [[ {args[allow_seq_dict_incompatibility]} == "True" ]]; then \ echo --disable-sequence-dictionary-validation true; \ fi) \ -ERC GVCF @@ -114,7 +116,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -130,7 +132,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.gvcf} tabix {snakemake.output.gvcf} diff --git a/snappy_wrappers/wrappers/gatk4_hc/genotype/wrapper.py b/snappy_wrappers/wrappers/gatk4_hc/genotype/wrapper.py index ad85c7907..d522966d1 100644 --- a/snappy_wrappers/wrappers/gatk4_hc/genotype/wrapper.py +++ b/snappy_wrappers/wrappers/gatk4_hc/genotype/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size.
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][window_length]} \ + --reference {args[reference]} \ + --bin-length {args[window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -95,7 +97,7 @@ GenotypeGVCFs \ --java-options "-Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR" \ --tmp-dir $TMPDIR \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --output $TMPDIR/shards-output/$(printf %06d $job_no).g.vcf.gz \ --intervals $interval \ -G StandardAnnotation \ @@ -107,7 +109,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk4_hc_gvcf][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel --plain -j $num_threads 'run-shard {{#}} {{}}' @@ -123,7 +125,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.vcf} tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/gatk4_hc/joint/wrapper.py b/snappy_wrappers/wrappers/gatk4_hc/joint/wrapper.py index 1c2cbb203..01c3adb34 100644 --- a/snappy_wrappers/wrappers/gatk4_hc/joint/wrapper.py +++ b/snappy_wrappers/wrappers/gatk4_hc/joint/wrapper.py @@ -15,6 +15,8 @@ } """ +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -54,13 +56,13 @@ # Create binning of the reference into windows of roughly the same size. 
gatk PreprocessIntervals \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --bin-length {snakemake.config[step_config][variant_calling][gatk4_hc_joint][window_length]} \ + --reference {args[reference]} \ + --bin-length {args[window_length]} \ --output $TMPDIR/raw.interval_list \ --interval-merging-rule OVERLAPPING_ONLY \ - $(for ignore_chrom in {snakemake.config[step_config][variant_calling][ignore_chroms]}; do \ + $(for ignore_chrom in {args[ignore_chroms]}; do \ awk "(\$1 ~ /$ignore_chrom/) {{ printf(\"--exclude-intervals %s:1-%d\\n\", \$1, \$2) }}" \ - {snakemake.config[static_data_config][reference][path]}.fai; \ + {args[reference]}.fai; \ done) # Postprocess the Picard-style interval list into properly padded interval strings suitable for @@ -96,15 +98,15 @@ --java-options "-Xmx$GATK_JAVA_MEMORY -Djava.io.tmpdir=$TMPDIR" \ --tmp-dir $TMPDIR \ --output $TMPDIR/shards-output/$(printf %06d $job_no).vcf.gz \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --dbsnp {snakemake.config[static_data_config][dbsnp][path]} \ + --reference {args[reference]} \ + --dbsnp {args[dbsnp]} \ -G StandardAnnotation \ -G StandardHCAnnotation \ --intervals $interval \ $(for path in {snakemake.input.bam}; do \ echo --input $path; \ done) \ - $(if [[ {snakemake.config[step_config][variant_calling][gatk4_hc_joint][allow_seq_dict_incompatibility]} == "True" ]]; then \ + $(if [[ {args[allow_seq_dict_incompatibility]} == "True" ]]; then \ echo --disable-sequence-dictionary-validation true; \ fi) }} @@ -112,7 +114,7 @@ # Perform parallel execution (set -x; sleep $(echo "scale=3; $RANDOM/32767*10" | bc)s) # sleep up to 10s to work around bug -num_threads={snakemake.config[step_config][variant_calling][gatk4_hc_joint][num_threads]} +num_threads={args[num_threads]} cat $TMPDIR/final_intervals.txt \ | parallel -j $num_threads 'run-shard {{#}} {{}}' @@ -128,7 +130,7 @@ /dev/stdin \ | bcftools norm \ -d exact \ - -f {snakemake.config[static_data_config][reference][path]} \ + -f {args[reference]} \ -O z \ -o {snakemake.output.vcf} tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/gatk_phase_by_transmission/wrapper.py b/snappy_wrappers/wrappers/gatk_phase_by_transmission/wrapper.py index 11f2d03ac..e63eb37df 100644 --- a/snappy_wrappers/wrappers/gatk_phase_by_transmission/wrapper.py +++ b/snappy_wrappers/wrappers/gatk_phase_by_transmission/wrapper.py @@ -7,6 +7,8 @@ shell.prefix("set -euo pipefail; ") +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -49,11 +51,11 @@ -nct 1 \ --pedigreeValidationType SILENT \ --FatherAlleleFirst \ - --DeNovoPrior {snakemake.config[step_config][variant_phasing][gatk_phase_by_transmission][de_novo_prior]} \ + --DeNovoPrior {args[de_novo_prior]} \ --pedigree {snakemake.input.ped} \ --variant $TMPDIR/trio_only.vcf.gz \ --out {snakemake.output.vcf} \ - --reference_sequence {snakemake.config[static_data_config][reference][path]} + --reference_sequence {args[reference]} tabix -f {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/gcnv/annotate_gc/wrapper.py b/snappy_wrappers/wrappers/gcnv/annotate_gc/wrapper.py index 539d35b9b..c24bcb156 100644 --- a/snappy_wrappers/wrappers/gcnv/annotate_gc/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/annotate_gc/wrapper.py @@ -5,19 +5,18 @@ from snakemake.shell import shell +args = getattr(snakemake.params, "args", {}) + # Pick the target BED file to use. # FIXME: why is 'target_interval_bed' not used? 
-config = DictQuery(snakemake.config).get("step_config/helper_gcnv_model_targeted/gcnv") -for item in config["path_target_interval_list_mapping"]: +for item in args["path_target_interval_list_mapping"]: if item["name"] == snakemake.wildcards.library_kit: target_interval_bed = item["path"] break else: # of for, did not break out raise Exception("Found no target intervals for %s" % item["name"]) -map_bed = DictQuery(snakemake.config).get( - "step_config/helper_gcnv_model_targeted/gcnv/path_uniquely_mapable_bed" -) +map_bed = args["path_uniquely_mapable_bed"] shell( r""" @@ -26,7 +25,7 @@ gatk AnnotateIntervals \ --interval-merging-rule OVERLAPPING_ONLY \ --mappability-track {map_bed} \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --intervals {snakemake.input.interval_list} \ --output {snakemake.output.tsv} """ diff --git a/snappy_wrappers/wrappers/gcnv/annotate_gc_wgs/wrapper.py b/snappy_wrappers/wrappers/gcnv/annotate_gc_wgs/wrapper.py index ec9482886..3ad8d0759 100644 --- a/snappy_wrappers/wrappers/gcnv/annotate_gc_wgs/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/annotate_gc_wgs/wrapper.py @@ -11,13 +11,13 @@ base_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..", "..", "..")) sys.path.insert(0, base_dir) +args = getattr(snakemake.params, "args", {}) + from snappy_pipeline.utils import DictQuery # Although optional for the tool, GATK recommend a providing a mappability track -map_bed = DictQuery(snakemake.config).get( - "step_config/helper_gcnv_model_wgs/gcnv/path_uniquely_mapable_bed" -) +map_bed = args["path_uniquely_mapable_bed"] shell( @@ -27,7 +27,7 @@ gatk AnnotateIntervals \ --interval-merging-rule OVERLAPPING_ONLY \ --mappability-track {map_bed} \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --intervals {snakemake.input.interval_list} \ --output {snakemake.output.tsv} """ diff --git a/snappy_wrappers/wrappers/gcnv/contig_ploidy/wrapper.py b/snappy_wrappers/wrappers/gcnv/contig_ploidy/wrapper.py index 0973a76e6..db92eee83 100644 --- a/snappy_wrappers/wrappers/gcnv/contig_ploidy/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/contig_ploidy/wrapper.py @@ -8,6 +8,8 @@ # # cf. https://github.com/broadinstitute/gatk/issues/8164 +args = getattr(snakemake.params, "args", {}) + out_path = pathlib.Path(snakemake.output.done).parent MALE = "male" @@ -26,9 +28,7 @@ paths_tsv = " ".join(snakemake.input.tsv) # Add interval block list for PAR regions if configured. 
-par_intervals = snakemake.config["step_config"][snakemake.params.step_key]["gcnv"].get( - "path_par_intervals" -) +par_intervals = args["path_par_intervals"] if par_intervals: par_args = f"-XL {par_intervals}" else: diff --git a/snappy_wrappers/wrappers/gcnv/coverage/wrapper.py b/snappy_wrappers/wrappers/gcnv/coverage/wrapper.py index 7fc020196..c578cecf0 100644 --- a/snappy_wrappers/wrappers/gcnv/coverage/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/coverage/wrapper.py @@ -2,13 +2,15 @@ from snakemake.shell import shell +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x gatk CollectReadCounts \ --interval-merging-rule OVERLAPPING_ONLY \ - -R {snakemake.config[static_data_config][reference][path]} \ + -R {args[reference]} \ -L {snakemake.input.interval_list} \ -I {snakemake.input.bam} \ --format TSV \ diff --git a/snappy_wrappers/wrappers/gcnv/joint_germline_cnv_segmentation/wrapper.py b/snappy_wrappers/wrappers/gcnv/joint_germline_cnv_segmentation/wrapper.py index 0b0a20e64..11f2c14cf 100644 --- a/snappy_wrappers/wrappers/gcnv/joint_germline_cnv_segmentation/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/joint_germline_cnv_segmentation/wrapper.py @@ -2,6 +2,8 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + DEF_HELPER_FUNCS = r""" compute-md5() { @@ -53,7 +55,7 @@ # Run actual tools -------------------------------------------------------------------------------- gatk JointGermlineCNVSegmentation \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ $(for vcf in {snakemake.input.vcf}; do echo --variant $vcf; done) \ --model-call-intervals {snakemake.input.interval_list} \ --pedigree {snakemake.input.ped} \ diff --git a/snappy_wrappers/wrappers/gcnv/preprocess_intervals/wrapper.py b/snappy_wrappers/wrappers/gcnv/preprocess_intervals/wrapper.py index 8e3a3b4fa..8145e9cd9 100644 --- a/snappy_wrappers/wrappers/gcnv/preprocess_intervals/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/preprocess_intervals/wrapper.py @@ -3,10 +3,10 @@ from snakemake.shell import shell +args = getattr(snakemake.params, "args", {}) # Pick the target BED file to use. 
-config = snakemake.config["step_config"][snakemake.params.step_key]["gcnv"] -for item in config["path_target_interval_list_mapping"]: +for item in args["path_target_interval_list_mapping"]: if item["name"] == snakemake.wildcards.library_kit: target_interval_bed = item["path"] break @@ -20,7 +20,7 @@ gatk PreprocessIntervals \ --bin-length 0 \ --interval-merging-rule OVERLAPPING_ONLY \ - -R {snakemake.config[static_data_config][reference][path]} \ + -R {args[reference]} \ -L {target_interval_bed} \ -O {snakemake.output.interval_list} """ diff --git a/snappy_wrappers/wrappers/gcnv/preprocess_intervals_wgs/wrapper.py b/snappy_wrappers/wrappers/gcnv/preprocess_intervals_wgs/wrapper.py index 7d1f58501..3388a5b89 100644 --- a/snappy_wrappers/wrappers/gcnv/preprocess_intervals_wgs/wrapper.py +++ b/snappy_wrappers/wrappers/gcnv/preprocess_intervals_wgs/wrapper.py @@ -2,6 +2,8 @@ from snakemake.shell import shell +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -9,7 +11,7 @@ gatk PreprocessIntervals \ --padding 0 \ --interval-merging-rule OVERLAPPING_ONLY \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --output {snakemake.output.interval_list} """ ) diff --git a/snappy_wrappers/wrappers/hts_screen/__init__.py b/snappy_wrappers/wrappers/hts_screen/__init__.py deleted file mode 100644 index b4490f161..000000000 --- a/snappy_wrappers/wrappers/hts_screen/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for Kraken""" diff --git a/snappy_wrappers/wrappers/hts_screen/environment.yaml b/snappy_wrappers/wrappers/hts_screen/environment.yaml deleted file mode 100644 index 8ebc35905..000000000 --- a/snappy_wrappers/wrappers/hts_screen/environment.yaml +++ /dev/null @@ -1,9 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bwa==0.7.17 - - seqtk==1.4 - - kraken==1.1.1 - - gawk==5.3.0 diff --git a/snappy_wrappers/wrappers/hts_screen/match_vector_to_report.py b/snappy_wrappers/wrappers/hts_screen/match_vector_to_report.py deleted file mode 100644 index ca426500c..000000000 --- a/snappy_wrappers/wrappers/hts_screen/match_vector_to_report.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 -"""Convert match_vector.tsv file from snappy-hts_screen.sh""" - -import argparse -import sys - -__author__ = "Manuel Holtgrewe " - -KEYS = ["unmapped", "one_one", "one_multi", "multi_one", "multi_multi"] - - -def init_counts(genomes): - return [dict((key, 0) for key in KEYS) for g in genomes] - - -def run(args): - counts = [] - num_reads = 0 - num_reads_no_hits = 0 - for lineno, line in enumerate(args.in_file): - line = line.strip() - fields = line.split("\t") - if lineno == 0: # initialize counts and genomes - genomes = fields[1:-1] - counts = init_counts(genomes) - continue - num_reads += 1 - # compute values for determining match class - arr = list(map(int, fields[1:-1])) - if not sum(arr): - num_reads_no_hits += 1 - is_multi, n_genomes = False, 0 - for i, n_alis in enumerate(arr): - if n_alis >= 1: - n_genomes += 1 - if n_alis > 1: - is_multi = True - # increment counts - for i, n_alis in enumerate(arr): - if n_alis == 0: - counts[i]["unmapped"] += 1 - elif not is_multi: - assert n_genomes >= 1 - if genomes == 1: - counts[i]["one_one"] += 1 - else: - counts[i]["one_multi"] += 1 - else: - if genomes == 1: - counts[i]["multi_one"] += 1 - else: - counts[i]["multi_multi"] += 1 - # print results - print("#hts_screen version 0.1\t#Reads in subset: ${num_reads}", 
file=args.out_file) - print( - ( - "Genome\t#Reads_processed\t#Unmapped\t%Unmapped\t" - "#One_hit_one_genome\t%One_hit_one_genome\t" - "#Multiple_hits_one_genome\t%Multiple_hits_one_genome\t" - "#One_hit_multiple_genomes\t%One_hit_multiple_genomes\t" - "Multiple_hits_multiple_genomes\t" - "%Multiple_hits_multiple_genomes" - ), - file=args.out_file, - ) - for i, genome in enumerate(genomes): - tpl = "{}\t{}\t{}\t{:0.2f}\t{}\t{:0.2f}\t{}\t{:0.2f}\t{}\t{:0.2f}\t{}\t{:0.2f}" - print( - tpl.format( - genome, - num_reads, - counts[i]["unmapped"], - 100 * counts[i]["unmapped"] / num_reads, - counts[i]["one_one"], - 100 * counts[i]["one_one"] / num_reads, - counts[i]["multi_one"], - 100 * counts[i]["multi_one"] / num_reads, - counts[i]["one_multi"], - 100 * counts[i]["one_multi"] / num_reads, - counts[i]["multi_multi"], - 100 * counts[i]["multi_multi"] / num_reads, - ), - file=args.out_file, - ) - - perc_no_hits = 100 * num_reads_no_hits / num_reads - print(file=args.out_file) - print("%Hit_no_genomes {:.02f}".format(perc_no_hits), file=args.out_file) - - -def main(argv=None): - parser = argparse.ArgumentParser(description="Process some integers.") - parser.add_argument( - "--in-file", - default=sys.stdin, - type=argparse.FileType("rt"), - help="Input file, defaults to stdin", - ) - parser.add_argument( - "--out-file", - default=sys.stdout, - type=argparse.FileType("wt"), - help="Output file, defaults to stdout", - ) - run(parser.parse_args()) - - -def __main__(): - return main() - - -if __name__ == "__main__": - main(sys.argv) diff --git a/snappy_wrappers/wrappers/hts_screen/meta.yaml b/snappy_wrappers/wrappers/hts_screen/meta.yaml deleted file mode 100644 index cb9fbb50f..000000000 --- a/snappy_wrappers/wrappers/hts_screen/meta.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: hts_screen -description: Screen HTS reads with known organism references -authors: -- Manuel Holtgrewe diff --git a/snappy_wrappers/wrappers/hts_screen/wrapper.py b/snappy_wrappers/wrappers/hts_screen/wrapper.py deleted file mode 100644 index 06072e550..000000000 --- a/snappy_wrappers/wrappers/hts_screen/wrapper.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for HTS Screen: Snakemake wrapper.py""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# Build command snippet for passing through BWA-MEM and paths to references -tee_cmd = "tee" -refs = [] -for i, ref in enumerate( - snakemake.config["step_config"]["illumina_demultiplexing"]["hts_screen"]["references"] -): - name = ref["name"] - path_index = ref["bwa_index"] - tee_cmd += ' >(bwa_alignment {i} "{ref_name}" "{path_index}")'.format( - i=(i + 1), ref_name=name, path_index=path_index - ) - refs.append("{ref_name}:{path_index}".format(ref_name=name, path_index=path_index)) - - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -bwa_alignment() -{{ - num=$1 - name=$2 - index=$3 - - set -euo pipefail - - # Align using BWA-MEM and remove headers from SAM. Then, - # filter to primary alignments and print out TSV format with - # the following columns, sorted by read name. 
- # - # 1: read name - # 2: number of alignments found - # 3: sequence (if $num==1) - # 4: PHRED qualities (if $num==1) - bwa mem \ - -t {snakemake.config[step_config][illumina_demultiplexing][hts_screen][num_threads_bwa]} \ - $index \ - - \ - | tee $TMPDIR/bwa_$num.sam \ - | grep -v '^@' \ - | gawk -v num=$num -F $'\t' ' - BEGIN {{ OFS=FS; }} - (!and($2, 256) && !and($2, 1024) && !and($2, 2048)) {{ - if (and($2, 4)) {{ - count = 0; - }} else {{ - count = 1; - for (i = 12; i <= NF; ++i) {{ - if ($i ~ "XA:Z") {{ - s = $i; - gsub(/[^;]/, "", s); - count = count + length(s); - }} - }} - $2 = count; - }} - - if (num == 1) {{ - print $1, count, $10, $11; - }} else {{ - print $1, count; - }} - }}' \ - | sort -k1,1 \ - > $TMPDIR/bwa_$num.tsv - touch $TMPDIR/bwa_$num.tsv.done -}} - -process_bwa_results() -{{ - set -euo pipefail - - header="#name" - paste_cmd="paste" - i=1 - for ref in $*; do - num=$i - name=$(echo $ref | cut -d : -f 1) - index=$(echo $ref | cut -d : -f 2) - - while [[ ! -f /$TMPDIR/bwa_$num.tsv.done ]]; do - sleep 2 - done - - header="$header\t$name" - - if [[ $num -eq 1 ]]; then - paste_cmd="$paste_cmd $TMPDIR/bwa_$num.tsv" - else - paste_cmd="$paste_cmd <(cut \ - -f 2 $TMPDIR/bwa_$num.tsv)" - fi - - let "i=$i+1" - done - header="$header\tUnaligned" - - echo -e "$header" > $TMPDIR/match_vector.tsv - eval "$paste_cmd" \ - | tee >( - gawk -F $'\t' '{{ - x = 1; - for (i = 5; i <= NF; ++i) {{ - if ($i >= 1) {{ x = 0; }} - }} - if (x) {{ - printf("@%s\n%s\n+\n%s\n", $1, $3, $4); - }} - }}' \ - | gzip -c \ - > ${{TMPDIR}}/unaligned_reads.fq.gz - ) \ - | cut -f 1,2,5- \ - | gawk -F $'\t' ' - BEGIN {{ OFS=FS }} - {{ - x = 1; - for (i = 2; i <= NF; ++i) {{ - if ($i >= 1) {{ x = 0; }} - }} - print $0, x; - }}' \ - >> ${{TMPDIR}}/match_vector.tsv -}} - -postprocess_match_vector() -{{ - set -euo pipefail - - fname=$1 - - num_reads=$(( $(cat $fname | wc -l) - 1)) - - hit_no_genomes=$(rev $fname \ - | cut -f 1 \ - | grep -v '^0$' \ - | wc -l) - perc_hit_no_genomes=$(\ - printf "%0.2f" $(echo $hit_no_genomes / $num_reads \ - | bc -l)) - - genomes=$(head -n 1 $fname | rev \ - | cut -f 2- | rev | cut -f 2-) - - python -m snappy_wrappers.wrappers.hts_screen.match_vector_to_report --in-file $fname -}} - -# Setup Temporary Directory ----------------------------------------------------------------------- -TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Perform BWA Alignment --------------------------------------------------------------------------- -(seqtk sample \ - {snakemake.input} \ - {snakemake.config[step_config][illumina_demultiplexing][hts_screen][sample_rate]} || true) \ -| eval {tee_cmd} >/dev/null - -# Process BWA Results ----------------------------------------------------------------------------- -process_bwa_results {refs} - -# Create Match Vector ----------------------------------------------------------------------------- -prefix=$(dirname {snakemake.output[1]}) -fastq_basename=$(basename {snakemake.input} .fastq.gz) - -gzip -c $TMPDIR/match_vector.tsv \ -> $prefix/${{fastq_basename}}_match_vector.tsv.gz - -postprocess_match_vector $TMPDIR/match_vector.tsv \ -> $prefix/${{fastq_basename}}_hts_screen.txt - -if [[ "{snakemake.params.args[run_kraken]}" == "True" ]]; then - prefix=$(dirname snakemake.output[3]) - - if [[ $((zcat $TMPDIR/unaligned_reads.fq.gz || true) \ - | head | wc -l) -eq 0 ]]; then - >&2 echo "Not executing Kraken; no unaligned reads" - touch $prefix/NO_UNALIGNED_READS - test -f $prefix/${{fastq_basename}}_kraken.gz || gzip -c /dev/null \ - > 
$prefix/${{fastq_basename}}_kraken.gz - test -f $prefix/${{fastq_basename}}_kraken_report.gz || gzip -c /dev/null \ - > $prefix/${{fastq_basename}}_kraken_report.gz - else - kraken \ - --threads {snakemake.config[step_config][illumina_demultiplexing][hts_screen][num_threads_kraken]} \ - --db {snakemake.config[step_config][illumina_demultiplexing][hts_screen][path_kraken_db]} - --fastq-input \ - --gzip-compressed \ - $TMPDIR/unaligned_reads.fq.gz \ - | gzip -c \ - > $prefix/${{fastq_basename}}_kraken.gz - - kraken-report \ - --db {snakemake.config[step_config][illumina_demultiplexing][hts_screen][path_kraken_db]} - <(zcat $prefix/${{fastq_basename}}_kraken.gz) \ - | gzip -c \ - > $prefix/${{fastq_basename}}_kraken_report.gz - fi -fi -""" -) diff --git a/snappy_wrappers/wrappers/maelstrom/bam_collect_doc/wrapper.py b/snappy_wrappers/wrappers/maelstrom/bam_collect_doc/wrapper.py index f593d6116..06cd84107 100644 --- a/snappy_wrappers/wrappers/maelstrom/bam_collect_doc/wrapper.py +++ b/snappy_wrappers/wrappers/maelstrom/bam_collect_doc/wrapper.py @@ -2,6 +2,8 @@ from snakemake.shell import shell +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -31,7 +33,7 @@ export TMPDIR=$(mktemp -d) trap "rm -rf $TMPDIR" ERR EXIT -WINDOW={snakemake.config[step_config][ngs_mapping][bam_collect_doc][window_length]} +WINDOW={args[window_length]} # Compute coverage vcf.gz @@ -39,7 +41,7 @@ bam-collect-doc \ --in {snakemake.input.bam} \ --out {snakemake.output.vcf} \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --window-length $WINDOW find $(dirname $(dirname {snakemake.output.vcf})) @@ -64,7 +66,7 @@ prev=$1; }}' \ > $TMPDIR/out_cov.wig -cut -f 1-2 {snakemake.config[static_data_config][reference][path]}.fai \ +cut -f 1-2 {args[reference]}.fai \ > $TMPDIR/chrom.sizes wigToBigWig $TMPDIR/out_cov.wig $TMPDIR/chrom.sizes $(basename {snakemake.output.cov_bw}) @@ -84,7 +86,7 @@ prev=$1; }}' \ > $TMPDIR/out_mq.wig -cut -f 1-2 {snakemake.config[static_data_config][reference][path]}.fai \ +cut -f 1-2 {args[reference]}.fai \ > $TMPDIR/chrom.sizes wigToBigWig $TMPDIR/out_mq.wig $TMPDIR/chrom.sizes $(basename {snakemake.output.mq_bw}) diff --git a/snappy_wrappers/wrappers/manta/germline_targeted/wrapper.py b/snappy_wrappers/wrappers/manta/germline_targeted/wrapper.py index 1b0796708..d0207c99d 100644 --- a/snappy_wrappers/wrappers/manta/germline_targeted/wrapper.py +++ b/snappy_wrappers/wrappers/manta/germline_targeted/wrapper.py @@ -2,6 +2,8 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + DEF_HELPER_FUNCS = r""" compute-md5() { @@ -64,7 +66,7 @@ configManta.py \ --retainTempFiles \ --exome \ - --referenceFasta {snakemake.config[static_data_config][reference][path]} \ + --referenceFasta {args[reference]} \ --runDir $workdir \ $(echo "{snakemake.input}" | tr ' ' '\n' | grep -v 'bai$' | sed 's/^/--bam /g') diff --git a/snappy_wrappers/wrappers/manta/germline_wgs/wrapper.py b/snappy_wrappers/wrappers/manta/germline_wgs/wrapper.py index aacfe58b6..2ce1f4de9 100644 --- a/snappy_wrappers/wrappers/manta/germline_wgs/wrapper.py +++ b/snappy_wrappers/wrappers/manta/germline_wgs/wrapper.py @@ -2,6 +2,8 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + DEF_HELPER_FUNCS = r""" compute-md5() { @@ -62,7 +64,7 @@ rm -rf $outdir/* $workdir/* configManta.py \ - --referenceFasta {snakemake.config[static_data_config][reference][path]} \ + --referenceFasta {args[reference]} \ --runDir $workdir 
\ $(echo "{snakemake.input}" | tr ' ' '\n' | grep -v 'bai$' | sed 's/^/--bam /g') diff --git a/snappy_wrappers/wrappers/manta/somatic_wgs/wrapper.py b/snappy_wrappers/wrappers/manta/somatic_wgs/wrapper.py index 58f33b6ac..aa3266739 100644 --- a/snappy_wrappers/wrappers/manta/somatic_wgs/wrapper.py +++ b/snappy_wrappers/wrappers/manta/somatic_wgs/wrapper.py @@ -6,6 +6,8 @@ __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = getattr(snakemake.params, "args", {}) + shell( r""" # ----------------------------------------------------------------------------- @@ -22,7 +24,7 @@ trap "rm -rf \"$workdir\"" EXIT configManta.py \ - --referenceFasta {snakemake.config[static_data_config][reference][path]} \ + --referenceFasta {args[reference]} \ --runDir $workdir \ --normalBam {snakemake.input.normal_bam} \ --tumorBam {snakemake.input.tumor_bam} diff --git a/snappy_wrappers/wrappers/mantis/mantis_msi2/run/wrapper.py b/snappy_wrappers/wrappers/mantis/mantis_msi2/run/wrapper.py index 822ae00d0..3b74e7c20 100644 --- a/snappy_wrappers/wrappers/mantis/mantis_msi2/run/wrapper.py +++ b/snappy_wrappers/wrappers/mantis/mantis_msi2/run/wrapper.py @@ -5,6 +5,8 @@ __author__ = "Clemens Messerschmidt" +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") @@ -42,8 +44,8 @@ mantis-msi2 \ -t {snakemake.input.tumor_bam} \ -n {snakemake.input.normal_bam} \ - --genome {snakemake.config[static_data_config][reference][path]} \ - --bedfile {snakemake.config[step_config][somatic_msi_calling][loci_bed]} \ + --genome {args[reference]} \ + --bedfile {args[loci_bed]} \ --min-read-length 35 \ --min-read-quality 20.0 \ --min-locus-quality 25.0 \ diff --git a/snappy_wrappers/wrappers/mbcs/wrapper.py b/snappy_wrappers/wrappers/mbcs/wrapper.py index f63a1fa6d..1e98c2eb6 100644 --- a/snappy_wrappers/wrappers/mbcs/wrapper.py +++ b/snappy_wrappers/wrappers/mbcs/wrapper.py @@ -20,6 +20,7 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) # Helper functions ------------------------------------------------------------ def pair_fastq_files(input_left, input_right): @@ -50,19 +51,19 @@ def pair_fastq_files(input_left, input_right): # Read snakemake input -------------------------------------------------------- -input_left = snakemake.params.args["input"]["reads_left"] -input_right = snakemake.params.args["input"].get("reads_right", "") +input_left = args["input"]["reads_left"] +input_right = args["input"].get("reads_right", "") -config = snakemake.config["step_config"]["ngs_mapping"]["mbcs"] +config = args["config"] mapper = config["mapping_tool"] -mapper_config = snakemake.config["step_config"]["ngs_mapping"][mapper] +mapper_config = args["mapper_config"] if mapper == "bwa_mem2": mapper = "bwa-mem2" if config["use_barcodes"]: barcoder = config["barcode_tool"] - config_barcodes = snakemake.config["step_config"]["ngs_mapping"][barcoder] + config_barcodes = args["barcode_config"] if config["recalibrate"]: - config_bqsr = snakemake.config["step_config"]["ngs_mapping"]["bqsr"] + config_bqsr = args["bqsr_config"] # Group fastq files by lane --------------------------------------------------- pairs = pair_fastq_files(input_left, input_right) @@ -162,7 +163,7 @@ def pair_fastq_files(input_left, input_right): ).format( mapper=mapper, indices=mapper_config["path_index"], - sample_name=snakemake.params.args["sample_name"], + sample_name=args["sample_name"], extra_args=" ".join(mapper_config.get("extra_args", [])), threads=mapper_config["num_threads_align"], ) @@ 
-276,7 +277,7 @@ def pair_fastq_files(input_left, input_right): " -R {reference} --known-sites {common_sites} \\\n" " -O {{output.tbl}}" ).format( - reference=snakemake.config["static_data_config"]["reference"]["path"], + reference=args["reference"], common_sites=config_bqsr["common_variants"], ) @@ -297,7 +298,7 @@ def pair_fastq_files(input_left, input_right): " -R {reference} \\\n" " -O {{output.bam}} \n" " samtools index {{output.bam}}" - ).format(reference=snakemake.config["static_data_config"]["reference"]["path"]) + ).format(reference=args["reference"]) kwargs = { "rule": "apply", diff --git a/snappy_wrappers/wrappers/mehari/annotate_seqvars/wrapper.py b/snappy_wrappers/wrappers/mehari/annotate_seqvars/wrapper.py index ceea699b4..c7c77c714 100644 --- a/snappy_wrappers/wrappers/mehari/annotate_seqvars/wrapper.py +++ b/snappy_wrappers/wrappers/mehari/annotate_seqvars/wrapper.py @@ -4,9 +4,7 @@ __author__ = "Manuel Holtgrewe " -# Get shortcut to configuration of varfish_export step -step_name = snakemake.params.args["step_name"] -export_config = snakemake.config["step_config"][step_name] +export_config = getattr(snakemake.params, "args", {}) DEF_HELPER_FUNCS = r""" compute-md5() @@ -67,7 +65,7 @@ | bcftools norm \ -m -any \ --force \ - --fasta-ref {snakemake.config[static_data_config][reference][path]} \ + --fasta-ref {export_config[reference]} \ | bcftools sort -T $TMPDIR \ | bgzip -c \ > $TMPDIR/tmp.vcf.gz @@ -77,7 +75,7 @@ bcftools norm \ -m -any \ --force \ - --fasta-ref {snakemake.config[static_data_config][reference][path]} \ + --fasta-ref {export_config[reference]} \ {snakemake.input.vcf} \ | bcftools sort -T $TMPDIR \ | bgzip -c \ diff --git a/snappy_wrappers/wrappers/mehari/annotate_strucvars/wrapper.py b/snappy_wrappers/wrappers/mehari/annotate_strucvars/wrapper.py index 258f8aa07..6cb8e7610 100644 --- a/snappy_wrappers/wrappers/mehari/annotate_strucvars/wrapper.py +++ b/snappy_wrappers/wrappers/mehari/annotate_strucvars/wrapper.py @@ -4,9 +4,11 @@ __author__ = "Manuel Holtgrewe " -# Get shortcut to configuration of varfish_export step -step_name = snakemake.params.args["step_name"] -export_config = snakemake.config["step_config"][step_name] +# Optionally get path to coverage VCF file. 
+coverage_vcf = " ".join(getattr(snakemake.input, "vcf_cov", [])) + +export_config = getattr(snakemake.params, "args", {}) + # Get shortcut to "fix_manta_invs.py" postprocessing script fix_manta_invs = os.path.join( os.path.dirname(__file__), @@ -72,7 +74,7 @@ num=$(printf %03d $i) python3 {fix_manta_invs} \ - --reference-fasta {snakemake.config[static_data_config][reference][path]} \ + --reference-fasta {export_config[reference]} \ --input-vcf $vcf \ --output-vcf $TMPDIR/fixed_bnd_to_inv_unsorted.$num.vcf bcftools sort -o $TMPDIR/fixed_bnd_to_inv.$num.vcf $TMPDIR/fixed_bnd_to_inv_unsorted.$num.vcf diff --git a/snappy_wrappers/wrappers/melt/genotype/wrapper.py b/snappy_wrappers/wrappers/melt/genotype/wrapper.py index d6d646d95..c2750c916 100644 --- a/snappy_wrappers/wrappers/melt/genotype/wrapper.py +++ b/snappy_wrappers/wrappers/melt/genotype/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -melt_config = snakemake.config["step_config"][snakemake.params.step_key]["melt"] +melt_config = getattr(snakemake.params, "args", {}) shell( r""" @@ -18,7 +18,7 @@ java -Xmx13G -jar $JAR \ Genotype \ - -h {snakemake.config[static_data_config][reference][path]} \ + -h {melt_config[reference]} \ -bamfile {snakemake.input.bam} \ -p $(dirname {snakemake.input.done}) \ -t $ME_REFS/$ME_INFIX/{snakemake.wildcards.me_type}_MELT.zip \ diff --git a/snappy_wrappers/wrappers/melt/group_analysis/wrapper.py b/snappy_wrappers/wrappers/melt/group_analysis/wrapper.py index 30f7a2f60..e37f9686f 100644 --- a/snappy_wrappers/wrappers/melt/group_analysis/wrapper.py +++ b/snappy_wrappers/wrappers/melt/group_analysis/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -melt_config = snakemake.config["step_config"][snakemake.params.step_key]["melt"] +melt_config = getattr(snakemake.params, "args", {}) shell( r""" @@ -18,7 +18,7 @@ java -jar -Xmx13G -jar $JAR \ GroupAnalysis \ - -h {snakemake.config[static_data_config][reference][path]} \ + -h {melt_config[reference]} \ -t $ME_REFS/$ME_INFIX/{snakemake.wildcards.me_type}_MELT.zip \ $(if [[ $ME_REFS == *37* ]] || [[ $ME_REFS == *hg19* ]]; then echo -v $ME_REFS/../../prior_files/{snakemake.wildcards.me_type}.1KGP.sites.vcf; diff --git a/snappy_wrappers/wrappers/melt/indiv_analysis/wrapper.py b/snappy_wrappers/wrappers/melt/indiv_analysis/wrapper.py index cebc9aecd..576decaa0 100644 --- a/snappy_wrappers/wrappers/melt/indiv_analysis/wrapper.py +++ b/snappy_wrappers/wrappers/melt/indiv_analysis/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -melt_config = snakemake.config["step_config"][snakemake.params.step_key]["melt"] +melt_config = getattr(snakemake.params, "args", {}) melt_arg_exome = {"sv_calling_targeted": "-exome"}.get(snakemake.params.step_key, "") shell( @@ -21,7 +21,7 @@ IndivAnalysis \ -b hs37d5/NC_007605 \ {melt_arg_exome} \ - -h {snakemake.config[static_data_config][reference][path]} \ + -h {melt_config[reference]} \ -t $ME_REFS/$ME_INFIX/{snakemake.wildcards.me_type}_MELT.zip \ -w $(dirname {snakemake.output.done}) \ -r 150 \ diff --git a/snappy_wrappers/wrappers/melt/make_vcf/wrapper.py b/snappy_wrappers/wrappers/melt/make_vcf/wrapper.py index f6f816c5f..096998682 100644 --- a/snappy_wrappers/wrappers/melt/make_vcf/wrapper.py +++ b/snappy_wrappers/wrappers/melt/make_vcf/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -melt_config = snakemake.config["step_config"][snakemake.params.step_key]["melt"] +melt_config = getattr(snakemake.params, "args", {}) shell( r""" @@ -25,7 +25,7 @@ java -Xmx13G -jar $JAR \ MakeVCF \ 
-genotypingdir $genotype_dir \ - -h {snakemake.config[static_data_config][reference][path]} \ + -h {melt_config[reference]} \ -j 100 \ -t $ME_REFS/$ME_INFIX/{snakemake.wildcards.me_type}_MELT.zip \ -p $(dirname {snakemake.input.group_analysis}) \ diff --git a/snappy_wrappers/wrappers/melt/preprocess/wrapper.py b/snappy_wrappers/wrappers/melt/preprocess/wrapper.py index a66885c10..c42b1bde9 100644 --- a/snappy_wrappers/wrappers/melt/preprocess/wrapper.py +++ b/snappy_wrappers/wrappers/melt/preprocess/wrapper.py @@ -2,7 +2,7 @@ __author__ = "Manuel Holtgrewe " -melt_config = snakemake.config["step_config"][snakemake.params.step_key]["melt"] +melt_config = getattr(snakemake.params, "args", {}) shell( r""" @@ -20,6 +20,6 @@ java -Xmx13G -jar $JAR \ Preprocess \ -bamfile {snakemake.output.orig_bam} \ - -h {snakemake.config[static_data_config][reference][path]} + -h {melt_config[reference]} """ ) diff --git a/snappy_wrappers/wrappers/melt/reorder_vcf/environment.yaml b/snappy_wrappers/wrappers/melt/reorder_vcf/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/melt/reorder_vcf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/melt/reorder_vcf/wrapper.py b/snappy_wrappers/wrappers/melt/reorder_vcf/wrapper.py deleted file mode 100644 index c68839f8e..000000000 --- a/snappy_wrappers/wrappers/melt/reorder_vcf/wrapper.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Melt genotype""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -echo '{snakemake.params.ped_members}' \ -| tr ' ' '\n' \ ->$TMPDIR/samples.txt - -bcftools view \ - --samples-file $TMPDIR/samples.txt \ - --output-type u \ - {snakemake.input.vcf} \ -| bcftools view \ - --output-file {snakemake.output.vcf} \ - --output-type z \ - --include '(GT !~ "\.") && (GT ~ "1")' - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/minialign/run/environment.yaml b/snappy_wrappers/wrappers/minialign/run/environment.yaml deleted file mode 100644 index 64d70746f..000000000 --- a/snappy_wrappers/wrappers/minialign/run/environment.yaml +++ /dev/null @@ -1,10 +0,0 @@ -channels: - - conda-forge - - bioconda - - http://conda.cubi.bihealth.org/cubiconda - - nodefaults -dependencies: - - minialign==0.5.3 - - samtools==1.9 - - htslib==1.9 - - inline-html==0.1.2 diff --git a/snappy_wrappers/wrappers/minialign/run/meta.yaml b/snappy_wrappers/wrappers/minialign/run/meta.yaml deleted file mode 100644 index eddb86af4..000000000 --- a/snappy_wrappers/wrappers/minialign/run/meta.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: minialign -description: Fast and accurate alignment tool for PacBio and Nanopore long reads -authors: -- Manuel Holtgrewe diff --git a/snappy_wrappers/wrappers/minialign/run/wrapper.py 
b/snappy_wrappers/wrappers/minialign/run/wrapper.py deleted file mode 100644 index c8752218b..000000000 --- a/snappy_wrappers/wrappers/minialign/run/wrapper.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running BWA-MEM for PacBio data""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -this_file = __file__ - -shell( - r""" -set -x - -# Write out information about conda installation. -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} - -# Also pipe stderr to log file -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -i=0 -for fname in $(find $(dirname {snakemake.input}) -name '*.bam' -or -name '*.fast?.gz'); do - let "i=$i+1" - - minialign \ - -t {snakemake.config[step_config][ngs_mapping][minialign][mapping_threads]} \ - {snakemake.config[static_data_config][reference][path]} \ - $fname \ - | samtools addreplacerg \ - -r "@RT\tID:{snakemake.wildcards.library_name}.$i\tSM:{snakemake.wildcards.library_name}\tPL:PACBIO" - \ - | samtools sort -o $TMPDIR/tmp.$i.bam -done - -out_bam={snakemake.output[0]} - -samtools merge $out_bam $TMPDIR/tmp.*.bam -samtools index $out_bam - -# Build MD5 files -pushd $(dirname {snakemake.output.bam}) -md5sum $(basename {snakemake.output.bam}) > $(basename {snakemake.output.bam}).md5 -md5sum $(basename {snakemake.output.bam_bai}) > $(basename {snakemake.output.bam_bai}).md5 -popd - -# QC Report --------------------------------------------------------------------------------------- - -# gather statistics from BAM file -# TODO: use pipes for only reading once from disk? 
-samtools stats {snakemake.output.bam} > {snakemake.output.report_bamstats_txt} -samtools flagstat {snakemake.output.bam} > {snakemake.output.report_flagstats_txt} -samtools idxstats {snakemake.output.bam} > {snakemake.output.report_idxstats_txt} - -# Build MD5 files for the reports -md5sum {snakemake.output.report_bamstats_txt} > {snakemake.output.report_bamstats_txt_md5} -md5sum {snakemake.output.report_flagstats_txt} >{snakemake.output.report_flagstats_txt_md5} -md5sum {snakemake.output.report_idxstats_txt} > {snakemake.output.report_idxstats_txt_md5} - -# Additional logging for transparency & reproducibility -# Logging: Save a copy this wrapper (with the pickle details in the header) -cp {this_file} $(dirname {snakemake.log.log})/wrapper_bwa.py - -# Logging: Save a permanent copy of the environment file used -cp $(dirname {this_file})/environment.yaml $(dirname {snakemake.log.log})/environment_wrapper_bwa.yaml -""" -) diff --git a/snappy_wrappers/wrappers/ngs_chew/fingerprint/wrapper.py b/snappy_wrappers/wrappers/ngs_chew/fingerprint/wrapper.py index be1da287e..244c51d0f 100644 --- a/snappy_wrappers/wrappers/ngs_chew/fingerprint/wrapper.py +++ b/snappy_wrappers/wrappers/ngs_chew/fingerprint/wrapper.py @@ -6,6 +6,8 @@ __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -37,7 +39,7 @@ mkdir -p $TMPDIR/{{out,sorted,sort.tmp}} ngs-chew fingerprint \ - --reference {snakemake.config[static_data_config][reference][path]} \ + --reference {args[reference]} \ --output-aafs \ --output-fingerprint {snakemake.output.npz} \ --input-bam {snakemake.input.bam} diff --git a/snappy_wrappers/wrappers/oncotator/run/wrapper.py b/snappy_wrappers/wrappers/oncotator/run/wrapper.py deleted file mode 100644 index b4abba749..000000000 --- a/snappy_wrappers/wrappers/oncotator/run/wrapper.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Oncotator""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -module purge -module load HTSlib/1.2.1-foss-2015a -module load BCFtools/1.2-foss-2015a -module load Oncotator/v1.8.0.0-foss-2015a-Python-2.7.9 - -# Shortcut to corpus directory (line length limit...) 
-corpus={snakemake.config[step_config][somatic_variant_annotation][oncotator][path_corpus]} - -# Save original sample names -bcftools view -h {snakemake.input.vcf} | tail -n 1 | cut -f 10- | tr '\t' '\n' \ ->{snakemake.output.samples} - -# Prepare input VCF file for Oncotator ------------------------------------------------ - -# Create new samples file with TUMOR/NORMAL -echo -e "TUMOR\nNORMAL" > {snakemake.output.fake_samples} - -# Create transmogrified VCF file for the input of Oncotator -bcftools filter \ - -r "1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,X,Y" \ - {snakemake.input.vcf} \ -| bcftools reheader --samples {snakemake.output.fake_samples} \ -> {snakemake.output.vcf_onco_in} - -# Call Oncotator with VCF output ------------------------------------------------------ - -# Perform Oncotator annotation (using fake sample names) -oncotator -v -i VCF -o VCF \ - --db-dir $corpus \ - -c $corpus/override_lists/tx_exact_uniprot_matches.AKT1_CRLF2_FGFR1.txt \ - --log_name $(dirname {snakemake.log})/oncotator.vcf.log \ - {snakemake.output.vcf_onco_in} \ - {snakemake.output.tmp_vcf} \ - {snakemake.params.genome} - -# Add back the real sample names -bcftools reheader --samples {snakemake.output.samples} {snakemake.output.tmp_vcf} \ -| bgzip -c \ ->{snakemake.output.vcf} -tabix {snakemake.output.vcf} - -# Compute MD5 sums -pushd $(dirname {snakemake.output.vcf}) && \ - md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf_md5}) && \ - md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi_md5}) && \ - popd - -# Call Oncotator with MAF output ------------------------------------------------------ - -# Perform Oncotator annotation (using fake sample names) -oncotator -v -i VCF -o TCGAMAF \ - --db-dir $corpus \ - -c $corpus/override_lists/tx_exact_uniprot_matches.AKT1_CRLF2_FGFR1.txt \ - --log_name $(dirname {snakemake.log})/oncotator.vcf.log \ - {snakemake.output.vcf_onco_in} \ - {snakemake.output.tmp_maf} \ - {snakemake.params.genome} -bgzip -c {snakemake.output.tmp_maf} >{snakemake.output.maf} - -# Compute MD5 sums -pushd $(dirname {snakemake.output.vcf}) && \ - md5sum $(basename {snakemake.output.maf}) >$(basename {snakemake.output.maf_md5}) && \ - popd -""" -) diff --git a/snappy_wrappers/wrappers/optitype/wrapper.py b/snappy_wrappers/wrappers/optitype/wrapper.py index 8682398b1..d08a2226e 100644 --- a/snappy_wrappers/wrappers/optitype/wrapper.py +++ b/snappy_wrappers/wrappers/optitype/wrapper.py @@ -5,10 +5,12 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + # Input fastqs are passed through snakemake.params. # snakemake.input is a .done file touched after linking files in. 
-reads_left = snakemake.params.args["input"]["reads_left"] -reads_right = snakemake.params.args["input"].get("reads_right", "") +reads_left = args["input"]["reads_left"] +reads_right = args["input"].get("reads_right", "") shell.executable("/bin/bash") @@ -46,7 +48,7 @@ # First alignment step (filter to candidate HLA reads) -------------------------------------------- data_dir=$(dirname $(readlink -f $(which OptiTypePipeline.py)))/data -seq_type={snakemake.params.args[seq_type]} +seq_type={args[seq_type]} refname=hla_reference_$seq_type.fasta reference=$data_dir/$refname @@ -55,7 +57,7 @@ yara_indexer -o $TMPDIR/yara_index/$refname $TMPDIR/yara_index/$refname yara_mapper \ - -t {snakemake.config[step_config][hla_typing][optitype][num_mapping_threads]} \ + -t {args[num_mapping_threads]} \ --error-rate 5 \ --output-format sam \ $TMPDIR/yara_index/$refname \ @@ -65,7 +67,7 @@ if [[ $paired -eq 1 ]]; then yara_mapper \ - -t {snakemake.config[step_config][hla_typing][optitype][num_mapping_threads]} \ + -t {args[num_mapping_threads]} \ --error-rate 5 \ --output-format sam \ $TMPDIR/yara_index/$refname \ @@ -75,7 +77,7 @@ fi cat $TMPDIR/tmp.d/reads_*.fastq | \ -seqtk sample - {snakemake.config[step_config][hla_typing][optitype][max_reads]} \ +seqtk sample - {args[max_reads]} \ > $TMPDIR/tmp.d/reads_sampled.fastq wc -l $TMPDIR/tmp.d/reads_*.fastq @@ -85,7 +87,7 @@ cat <<"EOF" >$TMPDIR/tmp.d/optitype.ini [mapping] razers3=razers3 -threads={snakemake.config[step_config][hla_typing][optitype][num_mapping_threads]} +threads={args[num_mapping_threads]} [ilp] solver=glpk diff --git a/snappy_wrappers/wrappers/pb_honey_spots/germline/wrapper.py b/snappy_wrappers/wrappers/pb_honey_spots/germline/wrapper.py deleted file mode 100644 index fc6531dea..000000000 --- a/snappy_wrappers/wrappers/pb_honey_spots/germline/wrapper.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running PB Honey Spots on Germline data""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -export LC_ALL=C - -module purge -module load Python/2.7.9-foss-2015a -module load parallel/20160322-foss-2015a - -PBSUITE=/fast/users/mholtgr/scratch/build/PBSuite_15.8.24/ - -. $PBSUITE/setup.sh - -set -euo pipefail - -inputs=$(echo {snakemake.input} | tr ' ' '\n' | grep '\.bam$') - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -honey_py_spots() -{{ - set -ex - mkdir -p $1 && pushd $1 - - ln -s ../ins.bam . - ln -s ../ins.bam.bai . - ln -s ../del.bam . - ln -s ../del.bam.bai . 
- - pwd - ls -lh - tree - - # Call insertions - Honey.py spots \ - --chrom $1 \ - -q 20 \ - -m 30 \ - --reference {snakemake.config[static_data_config][reference][path]} \ - -i 20 \ - -e 1 \ - -E 1 \ - --spanMax 100000 \ - --consensus None \ - ins.bam - - # Call deletions - Honey.py spots \ - --chrom $1 \ - -q 20 \ - -m 40 \ - --reference {snakemake.config[static_data_config][reference][path]} \ - -i 20 \ - -e 2 \ - -E 2 \ - --spanMax 100000 \ - --consensus None \ - del.bam - - cat <(grep DEL del.hon.spots) <(grep INS ins.hon.spots) \ - | sort -k1,1 -k2,2g \ - >hon.spots - - cat <(grep DEL hon.spots \ - | tr ';=' '\t' \ - | awk '(($4=="DEL") && ($13 > 1) && ($5>=30) && ($13/$21)>=0.25) {{ print $0; }}' \ - | awk '{{ w=int($5); s=$3-$2; n=ws?w:s; if (n/x > 0.1) {{ print $0; }} }}' \ - | awk '{{ lw=int($5/2); rw=$5-lw; mp=int(($2+$3)/2); - print $1 "\t" mp-lw "\t" mp+rw "\tDEL:" lw+rw ":" int($13) "/" int($21); }}') \ - <(grep INS hon.spots \ - | tr ';=' '\t' \ - | awk '(($5>=30) && ($13 > 1) && ($13/$21)>=0.25) {{ print $0; }}' \ - | awk '($4=="INS") {{ - print $1 "\t" $3 "\t" $3+1 "\t" "INS:" $5 ":" int($13) "/" int($21); }}') \ - | sort -k1,1 -k2,2g \ - >hon.bed -}} -export -f honey_py_spots - -outdir=$PWD/$(dirname {snakemake.output.bed}) - -for input in $inputs; do - mkdir -p $TMPDIR/$(basename $input) - ln -sr $input $TMPDIR/$(basename $input)/ins.bam - ln -sr $input $TMPDIR/$(basename $input)/del.bam - ln -sr $input.bai $TMPDIR/$(basename $input)/ins.bam.bai - ln -sr $input.bai $TMPDIR/$(basename $input)/del.bam.bai - pushd $TMPDIR/$(basename $input) - - parallel -t -j {snakemake.config[step_config][sv_calling_wgs][pb_honey_spots][num_threads]} honey_py_spots ::: {{1..22}} X Y - - prefix=$(basename $input .bam | rev | cut -d . -f 2- | rev) - suffix=$(basename $input .bam | rev | cut -d . -f 1 | rev) - - cat $TMPDIR/$(basename $input)/*/hon.bed \ - | sort -k1,1 -k2,2g \ - >$outdir/$prefix.pb_honey_spots.$suffix.bed - - mkdir -p $outdir/full_result - cp -R $TMPDIR/$(basename $input) $outdir/full_result - - popd -done - -pushd $(dirname {snakemake.output.bed}) -for f in *.bed; do - md5sum $f >$f.md5 -done -""" -) diff --git a/snappy_wrappers/wrappers/picard/metrics/wrapper.py b/snappy_wrappers/wrappers/picard/metrics/wrapper.py index 810a466e8..6f681ac11 100644 --- a/snappy_wrappers/wrappers/picard/metrics/wrapper.py +++ b/snappy_wrappers/wrappers/picard/metrics/wrapper.py @@ -5,9 +5,9 @@ __author__ = "Eric Blanc " -reference = snakemake.config["static_data_config"]["reference"]["path"] -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step]["picard"] +args = getattr(snakemake.params, "args", {}) + +reference = args["reference"] collect_multiple_metrics_programs = { "CollectAlignmentSummaryMetrics", @@ -22,18 +22,13 @@ collect_multiple_metrics = " ".join( [ f"-PROGRAM {pgm}" - for pgm in collect_multiple_metrics_programs.intersection(set(config["programs"])) + for pgm in collect_multiple_metrics_programs.intersection(set(args["programs"])) ] ) -# TODO: understand why snakemake.params is a list... -prefix = "" -if "prefix" in snakemake.params[0].keys() and snakemake.params[0]["prefix"]: - prefix = snakemake.params[0]["prefix"] + "." 
+prefix = args.get("prefix", "") -name = "null" -if "bait_name" in config.keys() and config["bait_name"]: - name = config["bait_name"] +name = args.get("bait_name", "null") shell.executable("/bin/bash") @@ -84,25 +79,25 @@ {collect_multiple_metrics} fi -if [[ "{config[programs]}" == *"EstimateLibraryComplexity"* ]] +if [[ "{args[programs]}" == *"EstimateLibraryComplexity"* ]] then java -jar $picard_jar EstimateLibraryComplexity \ -I {snakemake.input.bam} \ -O $d/{prefix}EstimateLibraryComplexity.txt fi -if [[ "{config[programs]}" == *"CollectJumpingLibraryMetrics"* ]] +if [[ "{args[programs]}" == *"CollectJumpingLibraryMetrics"* ]] then java -jar $picard_jar CollectJumpingLibraryMetrics \ -I {snakemake.input.bam} \ -O $d/{prefix}CollectJumpingLibraryMetrics.txt fi -if [[ "{config[programs]}" == *"CollectOxoGMetrics"* ]] +if [[ "{args[programs]}" == *"CollectOxoGMetrics"* ]] then - if [[ -r "{snakemake.config[static_data_config][dbsnp][path]}" ]] + if [[ -n "{args[dbsnp]}" ]] then - dbsnp="-DB_SNP {snakemake.config[static_data_config][dbsnp][path]}" + dbsnp="-DB_SNP {args[dbsnp]}" else dbsnp="" fi diff --git a/snappy_wrappers/wrappers/picard/prepare/wrapper.py b/snappy_wrappers/wrappers/picard/prepare/wrapper.py index b8be4d3a5..c6ee5f6b2 100644 --- a/snappy_wrappers/wrappers/picard/prepare/wrapper.py +++ b/snappy_wrappers/wrappers/picard/prepare/wrapper.py @@ -8,15 +8,14 @@ __author__ = "Eric Blanc " -reference = snakemake.config["static_data_config"]["reference"]["path"] -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step]["picard"] +args = getattr(snakemake.params, "args", {}) +reference = args["reference"] reference = re.sub("\.fa(sta)?(\.b?gz)?$", ".dict", reference) assert os.path.exists(reference), "Missing dict of reference fasta" -baits = config["path_to_baits"] -targets = config.get("path_to_targets", "") +baits = args["path_to_baits"] +targets = args.get("path_to_targets", "") shell.executable("/bin/bash") diff --git a/snappy_wrappers/wrappers/pizzly/run/wrapper.py b/snappy_wrappers/wrappers/pizzly/run/wrapper.py index 629473b69..20e9f7fd1 100644 --- a/snappy_wrappers/wrappers/pizzly/run/wrapper.py +++ b/snappy_wrappers/wrappers/pizzly/run/wrapper.py @@ -5,6 +5,8 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") shell( @@ -19,17 +21,17 @@ mkdir -p $inputdir if [[ ! -f "$inputdir/reads_1.fastq.gz" ]]; then - cat {snakemake.params.args[left]} > $inputdir/reads_1.fastq.gz + cat {args[left]} > $inputdir/reads_1.fastq.gz fi if [[ ! 
-f "$inputdir/reads_2.fastq.gz" ]]; then - cat {snakemake.params.args[right]} > $inputdir/reads_2.fastq.gz + cat {args[right]} > $inputdir/reads_2.fastq.gz fi pushd $workdir test -f output/fusion.txt \ || kallisto quant \ - -i {snakemake.config[step_config][somatic_gene_fusion_calling][pizzly][kallisto_index]} \ + -i {args[kallisto_index]} \ --fusion \ -o output \ input/reads_1.fastq.gz \ @@ -38,13 +40,13 @@ mkdir -p output pizzly \ - -k {snakemake.config[step_config][somatic_gene_fusion_calling][pizzly][kmer_size]} \ + -k {args[kmer_size]} \ --cache index.cache.txt \ --align-score 2 \ --insert-size 400 \ - --fasta {snakemake.config[step_config][somatic_gene_fusion_calling][pizzly][transcripts_fasta]} \ + --fasta {args[transcripts_fasta]} \ --output fusions \ - --gtf {snakemake.config[step_config][somatic_gene_fusion_calling][pizzly][annotations_gtf]} \ + --gtf {args[annotations_gtf]} \ output/fusion.txt pizzly_flatten_json.py fusions.json > fusions.txt diff --git a/snappy_wrappers/wrappers/platypus/call_joint/wrapper.py b/snappy_wrappers/wrappers/platypus/call_joint/wrapper.py index f8445916a..530180cbc 100644 --- a/snappy_wrappers/wrappers/platypus/call_joint/wrapper.py +++ b/snappy_wrappers/wrappers/platypus/call_joint/wrapper.py @@ -4,13 +4,20 @@ import os from snakemake.shell import shell +from snakemake.script import snakemake __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = snakemake.params["args"] args_ignore_chroms = "" -if snakemake.params.args["ignore_chroms"]: - args_ignore_chroms = " ".join(["--ignore-chroms"] + snakemake.params.args["ignore_chroms"]) +if ignore_chroms := args.get("ignore_chroms"): + args_ignore_chroms = " ".join(["--ignore-chroms"] + ignore_chroms) + +reference_path = args["reference_path"] +num_threads = args["num_threads"] +split_complex_mnvs = str(args["split_complex_mnvs"]) + split_script = os.path.join(os.path.dirname(__file__), "splitMNPsAndComplex.py") @@ -28,7 +35,7 @@ # Platypus Variant Calling ---------------------------------------------------- -REF={snakemake.config[static_data_config][reference][path]} +REF={reference_path} out_final={snakemake.output.vcf} #out_tmp=$TMPDIR/out_tmp.vcf.gz out_tmp=${{out_final%.vcf.gz}}.tmp.vcf.gz @@ -39,7 +46,7 @@ platypus callVariants \ --logFileName=$(dirname {snakemake.log})/platypus.log \ --bamFiles=$(echo "{snakemake.input.bam}" | tr ' ' ',') \ - --nCPU={snakemake.config[step_config][somatic_variant_calling][platypus_joint][num_threads]} \ + --nCPU={num_threads} \ --refFile=$REF \ --output=${{out_tmp%.gz}} \ --regions=$(snappy-genome_windows \ @@ -50,7 +57,7 @@ | tr '\n' ',' \ | sed -e 's/,$//g') -if [[ "{snakemake.config[step_config][somatic_variant_calling][platypus_joint][split_complex_mnvs]}" == "True" ]]; then +if [[ "{split_complex_mnvs}" == "True" ]]; then cat ${{out_tmp%.gz}} \ | python2 {split_script} \ | snappy-vcf_sort $REF.fai \ diff --git a/snappy_wrappers/wrappers/popdel/profile/wrapper.py b/snappy_wrappers/wrappers/popdel/profile/wrapper.py index 559605246..8919e06e8 100644 --- a/snappy_wrappers/wrappers/popdel/profile/wrapper.py +++ b/snappy_wrappers/wrappers/popdel/profile/wrapper.py @@ -6,6 +6,8 @@ __author__ = "Manuel Holtgrewe" __email__ = "manuel.holtgrewe@bih-charite.de" +args = getattr(snakemake.params, "args", {}) + shell( r""" # ----------------------------------------------------------------------------- @@ -42,7 +44,7 @@ chr22:25000000-26000000 EOF -if [[ "{snakemake.config[static_data_config][reference][path]}" =~ .*hs?37.* ]]; then +if [[ 
"{args[reference]}" =~ .*hs?37.* ]]; then perl -p -i -e 's/chr//g' $TMPDIR/intervals.txt fi diff --git a/snappy_wrappers/wrappers/r/environment.yaml b/snappy_wrappers/wrappers/r/environment.yaml deleted file mode 100644 index 6aba7643e..000000000 --- a/snappy_wrappers/wrappers/r/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - r-base==4.3.3 - - r-remotes==2.5.0 - - r-biocmanager==1.30.22 diff --git a/snappy_wrappers/wrappers/r/wrapper.py b/snappy_wrappers/wrappers/r/wrapper.py deleted file mode 100644 index c2759e9bf..000000000 --- a/snappy_wrappers/wrappers/r/wrapper.py +++ /dev/null @@ -1,68 +0,0 @@ -"""CUBI+Snakemake wrapper code for non-conda package installation""" - -from snakemake import shell - -__author__ = "Eric Blanc " - -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] - -if "packages" in snakemake.params.keys(): - packages = snakemake.params["packages"] -elif "packages" in config.keys(): - packages = config["packages"] -else: - packages = None - -assert packages is not None -assert isinstance(packages, list) -assert len(packages) > 0 - -to_install = [] -for package in packages: - package = dict(package) - to_install.append('list(name="{}", repo="{}")'.format(package["name"], package["repo"])) -to_install = "list({})".format(", ".join(to_install)) - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -# Write out information about conda installation. -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} >{snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} >{snakemake.log.conda_info_md5} - -# Also pipe stderr to log file -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -R --vanilla --slave << __EOF -for (pkg in {to_install}) {{ - if (pkg[["repo"]] == "cran") install.packages(pkg[["name"]], lib=dirname("{snakemake.output.done}"), update=FALSE, ask=FALSE) - if (pkg[["repo"]] == "bioconductor") BiocManager::install(pkg[["name"]], lib=dirname("{snakemake.output.done}"), update=FALSE, ask=FALSE) - if (pkg[["repo"]] == "github") remotes::install_github(pkg[["name"]], lib=dirname("{snakemake.output.done}"), upgrade="never") - if (pkg[["repo"]] == "bitbucket") remotes::install_bitbucket(pkg[["name"]], lib=dirname("{snakemake.output.done}"), upgrade="never") - if (pkg[["repo"]] == "local") remotes::install_local(pkg[["name"]], lib=dirname("{snakemake.output.done}"), upgrade="never") -}} -__EOF -touch {snakemake.output.done} -""" -) - -# Compute MD5 sums of logs. 
-shell( - r""" -md5sum {snakemake.log.log} >{snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/rnaqc/duplication/wrapper.py b/snappy_wrappers/wrappers/rnaqc/duplication/wrapper.py index 51b618cc0..a7377dd34 100644 --- a/snappy_wrappers/wrappers/rnaqc/duplication/wrapper.py +++ b/snappy_wrappers/wrappers/rnaqc/duplication/wrapper.py @@ -5,6 +5,8 @@ __author__ = "Clemens Messerschmidt " +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") shell( @@ -39,7 +41,7 @@ fi # Find out strand -strand={snakemake.config[step_config][gene_expression_quantification][strand]} +strand={args[strand]} if [ ${{strand}} -eq -1 ] then diff --git a/snappy_wrappers/wrappers/rnaqc/dupradar/wrapper.py b/snappy_wrappers/wrappers/rnaqc/dupradar/wrapper.py index 35ccb7f70..6d91cee68 100644 --- a/snappy_wrappers/wrappers/rnaqc/dupradar/wrapper.py +++ b/snappy_wrappers/wrappers/rnaqc/dupradar/wrapper.py @@ -5,6 +5,8 @@ __author__ = "Clemens Messerschmidt " +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") shell( @@ -39,7 +41,7 @@ fi # Find out strand -strand={snakemake.config[step_config][gene_expression_quantification][strand]} +strand={args[strand]} if [ ${{strand}} -eq -1 ] then @@ -84,10 +86,10 @@ Rscript --vanilla run_dupradar.R \ "{snakemake.input.bam}" \ results.tsv \ - {snakemake.config[step_config][gene_expression_quantification][dupradar][dupradar_path_annotation_gtf]} \ + {args[dupradar_path_annotation_gtf]} \ ${{strand}} \ ${{paired_cmd}} \ - {snakemake.config[step_config][gene_expression_quantification][dupradar][num_threads]} \ + {args[num_threads]} \ "." popd diff --git a/snappy_wrappers/wrappers/rnaqc/rnaseqc/wrapper.py b/snappy_wrappers/wrappers/rnaqc/rnaseqc/wrapper.py index f6c714708..e194efd91 100644 --- a/snappy_wrappers/wrappers/rnaqc/rnaseqc/wrapper.py +++ b/snappy_wrappers/wrappers/rnaqc/rnaseqc/wrapper.py @@ -5,6 +5,8 @@ __author__ = "Clemens Messerschmidt " +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") shell( @@ -39,7 +41,7 @@ fi # Find out strand -strand={snakemake.config[step_config][gene_expression_quantification][strand]} +strand={args[strand]} if [ ${{strand}} -eq -1 ] then @@ -59,7 +61,7 @@ fi # IMPORTANT NOTE- -# The GTF annotation file (snakemake.config[step_config][gene_expression_quantification][rnaseqc][rnaseqc_path_annotation_gtf]) +# The GTF annotation file (args[rnaseqc_path_annotation_gtf]) # assumes that: # - all records have a "transcript_id" entry among their attributes. Many records won't have it, # for example all "gene" (in feature column) are missing it, and it will trigger an error whn present. 
@@ -69,8 +71,8 @@ jar_path=${{JAVA_HOME}}/share/rna-seqc-1.1.8-2/RNA-SeQC_v1.1.8.jar ${{JAVA_HOME}}/bin/java -jar ${{jar_path}} \ - -r {snakemake.config[static_data_config][reference][path]} \ - -t {snakemake.config[step_config][gene_expression_quantification][rnaseqc][rnaseqc_path_annotation_gtf]} \ + -r {args[reference]} \ + -t {args[rnaseqc_path_annotation_gtf]} \ -s "Sample,{snakemake.input.bam}, " \ ${{paired_cmd}} \ -o ${{TMPDIR}}/rnaseqc diff --git a/snappy_wrappers/wrappers/rnaqc/stats/wrapper.py b/snappy_wrappers/wrappers/rnaqc/stats/wrapper.py index f39f667c6..4118dc25d 100644 --- a/snappy_wrappers/wrappers/rnaqc/stats/wrapper.py +++ b/snappy_wrappers/wrappers/rnaqc/stats/wrapper.py @@ -7,6 +7,8 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) + shell( r""" set -euo pipefail @@ -39,7 +41,7 @@ fi # Find out strand -strand={snakemake.config[step_config][gene_expression_quantification][strand]} +strand={args[strand]} if [ ${{strand}} -eq -1 ] then diff --git a/snappy_wrappers/wrappers/rseqc/wrapper.py b/snappy_wrappers/wrappers/rseqc/wrapper.py index 974abff2f..905449f80 100644 --- a/snappy_wrappers/wrappers/rseqc/wrapper.py +++ b/snappy_wrappers/wrappers/rseqc/wrapper.py @@ -9,8 +9,8 @@ shell.executable("/bin/bash") -current_step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][current_step]["strandedness"] +config = getattr(snakemake.params, "args", {}) + out_link_dir = ( os.path.dirname(snakemake.output.output) if "output" in snakemake.output.keys() else "" ) diff --git a/snappy_wrappers/wrappers/salmon/wrapper.py b/snappy_wrappers/wrappers/salmon/wrapper.py index 9f3b1d2ba..ac4bd7cd5 100644 --- a/snappy_wrappers/wrappers/salmon/wrapper.py +++ b/snappy_wrappers/wrappers/salmon/wrapper.py @@ -7,10 +7,12 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) + # Input fastqs are passed through snakemake.params. # snakemake.input is a .done file touched after linking files in. -reads_left = snakemake.params.args["input"]["reads_left"] -reads_right = snakemake.params.args["input"].get("reads_right", "") +reads_left = args["input"]["reads_left"] +reads_right = args["input"].get("reads_right", "") # salmon flag for first reads changes for single-end data. 
if reads_right: @@ -60,7 +62,7 @@ right_files_prefixed=" -2 ${{right_files}}" fi -t2g="{snakemake.config[step_config][gene_expression_quantification][salmon][path_transcript_to_gene]}" +t2g="{args[path_transcript_to_gene]}" t2g_cmd="" if [[ "$t2g" != "" ]] && [[ "$t2g" != "REQUIRED" ]] && [[ -r "$t2g" ]] then @@ -68,13 +70,13 @@ fi libraryType="A" -if [[ {snakemake.config[step_config][gene_expression_quantification][strand]} -ge 0 ]] +if [[ {args[strand]} -ge 0 ]] then libraryType="I" - if [[ {snakemake.config[step_config][gene_expression_quantification][strand]} -gt 0 ]] + if [[ {args[strand]} -gt 0 ]] then libraryType="${{libraryType}}S" - if [[ {snakemake.config[step_config][gene_expression_quantification][strand]} -eq 1 ]] + if [[ {args[strand]} -eq 1 ]] then libraryType="${{libraryType}}F" else @@ -86,14 +88,14 @@ fi salmon quant \ - -i {snakemake.config[step_config][gene_expression_quantification][salmon][path_index]} \ + -i {args[path_index]} \ -l $libraryType \ {read_flag} ${{left_files_prefixed}} ${{right_files_prefixed}} \ ${{t2g_cmd}} \ -o $TMPDIR \ - -p {snakemake.config[step_config][gene_expression_quantification][salmon][num_threads]} \ + -p {args[num_threads]} \ --auxDir aux \ - {snakemake.config[step_config][gene_expression_quantification][salmon][salmon_params]} + {args[salmon_params]} # Copy over the output files cp $TMPDIR/quant.sf {snakemake.output.transcript_sf} diff --git a/snappy_wrappers/wrappers/scalpel/somatic/wrapper.py b/snappy_wrappers/wrappers/scalpel/somatic/wrapper.py index 816e448ab..041bed5cb 100644 --- a/snappy_wrappers/wrappers/scalpel/somatic/wrapper.py +++ b/snappy_wrappers/wrappers/scalpel/somatic/wrapper.py @@ -8,6 +8,8 @@ this_file = __file__ +args = getattr(snakemake.params, "args", {}) + shell( r""" # Also pipe everything to log file @@ -56,10 +58,10 @@ --pathlimit 10000 \ --outratio 0.1 \ --mincov 3 \ - --ref {snakemake.config[static_data_config][reference][path]} \ + --ref {args[reference]} \ --dir $TMPDIR/scalpel.tmp \ --bed \ - {snakemake.config[step_config][somatic_variant_calling][scalpel][path_target_regions]} \ + {args[path_target_regions]} \ --somatic \ --normal {snakemake.input.normal_bam} \ --tumor {snakemake.input.tumor_bam} @@ -73,7 +75,7 @@ # Obtain fixed contig header lines awk '{{ printf("##contig=\n", $1, $2); }}' \ - {snakemake.config[static_data_config][reference][path]}.fai \ + {args[reference]}.fai \ > $TMPDIR/contig_headers.txt # join and transform output file for tumor/normal pairs diff --git a/snappy_wrappers/wrappers/scarHRD/run/wrapper.py b/snappy_wrappers/wrappers/scarHRD/run/wrapper.py index 7668f6f0c..64e67b316 100644 --- a/snappy_wrappers/wrappers/scarHRD/run/wrapper.py +++ b/snappy_wrappers/wrappers/scarHRD/run/wrapper.py @@ -6,15 +6,16 @@ __author__ = "Eric Blanc " +args = getattr(snakemake.params, "args", {}) + lib_path = os.path.realpath(os.path.dirname(snakemake.input.done)) -step = snakemake.config["pipeline_step"]["name"] -genome = snakemake.config["static_data_config"]["reference"]["path"] -length = snakemake.config["step_config"][step]["scarHRD"]["length"] -genome_name = snakemake.config["step_config"][step]["scarHRD"]["genome_name"] +genome = args["reference"] +length = args["length"] +genome_name = args["genome_name"] -chr_in_name = "TRUE" if snakemake.config["step_config"][step]["scarHRD"]["chr_prefix"] else "FALSE" -prefix = "chr" if snakemake.config["step_config"][step]["scarHRD"]["chr_prefix"] else "" +chr_in_name = "TRUE" if args["chr_prefix"] else "FALSE" +prefix = "chr" if args["chr_prefix"] else "" 
if genome_name == "grch37" or genome_name == "grch38": chromosomes = " ".join([prefix + str(x) for x in list(range(1, 23)) + ["X", "Y"]]) elif genome_name == "mouse": diff --git a/snappy_wrappers/wrappers/sequenza/coverage/wrapper.py b/snappy_wrappers/wrappers/sequenza/coverage/wrapper.py index 4b52abe3f..86716ac9c 100644 --- a/snappy_wrappers/wrappers/sequenza/coverage/wrapper.py +++ b/snappy_wrappers/wrappers/sequenza/coverage/wrapper.py @@ -15,10 +15,9 @@ __author__ = "Eric Blanc " +config = getattr(snakemake.params, "args", {}) -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step]["sequenza"] -genome = snakemake.config["static_data_config"]["reference"]["path"] +genome = config["reference"] length = config["length"] f = open(genome + ".fai", "rt") diff --git a/snappy_wrappers/wrappers/sequenza/gcreference/wrapper.py b/snappy_wrappers/wrappers/sequenza/gcreference/wrapper.py index 66412c181..c19c22aa6 100644 --- a/snappy_wrappers/wrappers/sequenza/gcreference/wrapper.py +++ b/snappy_wrappers/wrappers/sequenza/gcreference/wrapper.py @@ -6,9 +6,10 @@ __author__ = "Eric Blanc " -step = snakemake.config["pipeline_step"]["name"] -genome = snakemake.config["static_data_config"]["reference"]["path"] -length = snakemake.config["step_config"][step]["sequenza"]["length"] +args = getattr(snakemake.params, "args", {}) + +genome = args["reference"] +length = args["length"] shell.executable("/bin/bash") diff --git a/snappy_wrappers/wrappers/sequenza/report/environment.yaml b/snappy_wrappers/wrappers/sequenza/report/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/sequenza/report/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/sequenza/report/wrapper.py b/snappy_wrappers/wrappers/sequenza/report/wrapper.py deleted file mode 100644 index a8f4b2bdc..000000000 --- a/snappy_wrappers/wrappers/sequenza/report/wrapper.py +++ /dev/null @@ -1,98 +0,0 @@ -"""CUBI+Snakemake wrapper code for sequenza (R part, post-processing)""" - -import os -import sys - -# The following is required for being able to import snappy_wrappers modules -# inside wrappers. These run in an "inner" snakemake process which uses its -# own conda environment which cannot see the snappy_pipeline installation. 
-base_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..", "..", "..")) -sys.path.insert(0, base_dir) - -from snakemake import shell - -from snappy_wrappers.tools.genome_windows import yield_contigs - -__author__ = "Eric Blanc " - - -def config_to_r(x): - if x is None: - return "NULL" - if isinstance(x, str): - return f'"{x}"' - if isinstance(x, bool): - return "TRUE" if x else "FALSE" - if isinstance(x, list): - return "c({})".format(", ".join([config_to_r(xx) for xx in x])) - if isinstance(x, dict): - return "list({})".format( - ", ".join(['"{}"={}'.format(k, config_to_r(v)) for k, v in x.items()]) - ) - return str(x) - - -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step]["sequenza"] -genome = snakemake.config["static_data_config"]["reference"]["path"] - -f = open(genome + ".fai", "rt") -contigs = config_to_r(list(yield_contigs(f, config.get("ignore_chroms")))) -f.close() - -args_extract = config_to_r(dict(config["extra_args_extract"])) -args_fit = config_to_r(dict(config["extra_args_fit"])) - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -# Write out information about conda installation. -conda list >{snakemake.log.conda_list} -conda info >{snakemake.log.conda_info} -md5sum {snakemake.log.conda_list} >{snakemake.log.conda_list_md5} -md5sum {snakemake.log.conda_info} >{snakemake.log.conda_info_md5} - -# Also pipe stderr to log file -if [[ -n "{snakemake.log.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - exec 2> >(tee -a "{snakemake.log.log}" >&2) - else - rm -f "{snakemake.log.log}" && mkdir -p $(dirname {snakemake.log.log}) - echo "No tty, logging disabled" >"{snakemake.log.log}" - fi -fi - -export R_LIBS_USER=$(dirname {snakemake.input.packages}) -export VROOM_CONNECTION_SIZE=2000000000 - -R --vanilla --slave << __EOF -library(sequenza) - -args <- list(file="{snakemake.input.seqz}", assembly="{config[assembly]}", chromosome.list={contigs}) -args <- c(args, {args_extract}) -seqz <- do.call(sequenza.extract, args=args) - -args <- list(sequenza.extract=seqz, chromosome.list={contigs}, mc.cores=1) -args <- c(args, {args_fit}) -CP <- do.call(sequenza.fit, args=args) - -sequenza.results(sequenza.extract=seqz, cp.table=CP, sample.id="{snakemake.wildcards[library_name]}", out.dir=dirname("{snakemake.output.done}")) - -__EOF - -pushd $(dirname {snakemake.output.done}) ; fns=$(ls) ; for f in $fns ; do md5sum $f > $f.md5 ; done ; popd - -touch {snakemake.output.done} -""" -) - -# Compute MD5 sums of logs. 
-shell( - r""" -md5sum {snakemake.log.log} >{snakemake.log.log_md5} -""" -) diff --git a/snappy_wrappers/wrappers/sequenza/run/wrapper.py b/snappy_wrappers/wrappers/sequenza/run/wrapper.py index 2250a2c3a..3faabb627 100644 --- a/snappy_wrappers/wrappers/sequenza/run/wrapper.py +++ b/snappy_wrappers/wrappers/sequenza/run/wrapper.py @@ -15,6 +15,7 @@ __author__ = "Eric Blanc " +config = getattr(snakemake.params, "args", {}) def config_to_r(x): if x is None: @@ -33,9 +34,7 @@ def config_to_r(x): return str(x) -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step]["sequenza"] -genome = snakemake.config["static_data_config"]["reference"]["path"] +genome = config["reference"] length = config["length"] f = open(genome + ".fai", "rt") @@ -83,16 +82,16 @@ def config_to_r(x): args <- c(args, {args_fit}) CP <- do.call(sequenza.fit, args=args) -sequenza.results(sequenza.extract=seqz, cp.table=CP, sample.id="{snakemake.wildcards[library_name]}", out.dir=dirname("{snakemake.output.done}")) +sequenza.results(sequenza.extract=seqz, cp.table=CP, sample.id="{config[library_name]}", out.dir=dirname("{snakemake.output.done}")) warnings() # Convert *_segment.txt to *_dnacopy.seg to follow pipeline output format -segments <- file.path(dirname("{snakemake.output.done}"), sprintf("%s_segments.txt", "{snakemake.wildcards[library_name]}")) +segments <- file.path(dirname("{snakemake.output.done}"), sprintf("%s_segments.txt", "{config[library_name]}")) stopifnot(file.exists(segments)) dnacopy <- read.table(segments, sep="\t", header=1, stringsAsFactors=FALSE, check.names=FALSE) -dnacopy[,"ID"] <- "{snakemake.wildcards[library_name]}" +dnacopy[,"ID"] <- "{config[library_name]}" dnacopy[,"depth.ratio"] <- log2(dnacopy[,"depth.ratio"]) col_names <- c( diff --git a/snappy_wrappers/wrappers/sniffles/germline/wrapper.py b/snappy_wrappers/wrappers/sniffles/germline/wrapper.py deleted file mode 100644 index 23c21ac37..000000000 --- a/snappy_wrappers/wrappers/sniffles/germline/wrapper.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running Sniffles on germline data""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect stderr to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -module purge -module load BCFtools/1.3.1-foss-2015a - -set -euo pipefail - -inputs=$(echo {snakemake.input} | tr ' ' '\n' | grep '\.bam$') -outfile=$(echo {snakemake.output} | tr ' ' '\n' | grep 'vcf.gz' | head -n 1) -outdir=$(dirname $outfile) - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -for input in $inputs; do - mkdir -p $outdir/more - outname=$outdir/more/$(basename $input .bam).vcf - - sniffles \ - -m $input \ - -t {snakemake.config[step_config][sv_calling_wgs][sniffles][num_threads]} \ - -v $outname \ - --tmp_file $TMPDIR/sniffles.tmp \ - --cluster \ - --genotype -done - -touch {snakemake.output} -""" -) diff --git a/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/environment.yaml b/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at 
end of file diff --git a/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/wrapper.py b/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/wrapper.py deleted file mode 100644 index c505f1008..000000000 --- a/snappy_wrappers/wrappers/sniffles2/germline/bam_to_snf/wrapper.py +++ /dev/null @@ -1,31 +0,0 @@ -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT ERR - -sniffles \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --input {snakemake.input.bam} \ - --tandem-repeats {snakemake.config[step_config][sv_calling_wgs][sniffles2][tandem_repeats]} \ - --vcf $TMPDIR/tmp.vcf \ - --snf {snakemake.output.snf} \ - --threads {snakemake.threads} - -bgzip -c $TMPDIR/tmp.vcf >{snakemake.output.vcf} -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf}).tbi >$(basename {snakemake.output.vcf}).tbi.md5 -md5sum $(basename {snakemake.output.snf}) >$(basename {snakemake.output.snf}).md5 -popd -""" -) diff --git a/snappy_wrappers/wrappers/sniffles2/germline/environment.yaml b/snappy_wrappers/wrappers/sniffles2/germline/environment.yaml deleted file mode 100644 index 012332248..000000000 --- a/snappy_wrappers/wrappers/sniffles2/germline/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - sniffles==2.2 - - htslib==1.16 - - bcftools==1.16 diff --git a/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/environment.yaml b/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/wrapper.py b/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/wrapper.py deleted file mode 100644 index 779f1659f..000000000 --- a/snappy_wrappers/wrappers/sniffles2/germline/snf_to_vcf/wrapper.py +++ /dev/null @@ -1,41 +0,0 @@ -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT ERR - -sniffles \ - --reference {snakemake.config[static_data_config][reference][path]} \ - --input {snakemake.input.snf} \ - --tandem-repeats {snakemake.config[step_config][sv_calling_wgs][sniffles2][tandem_repeats]} \ - --vcf $TMPDIR/tmp_raw.vcf \ - --threads {snakemake.threads} - -# Remove decoy lines as sniffles writes out garbage IDs for some of them. -grep -v '^hs37d5' $TMPDIR/tmp_raw.vcf > $TMPDIR/tmp_nohs37d5.vcf - -for snf in {snakemake.input.snf}; do - sniffles_name=$(basename {snakemake.input.snf} .snf) - library_name=$(echo $sniffles_name | rev | cut -d . 
-f 1 | rev) - echo "$sniffles_name $library_name" >>$TMPDIR/samples.txt -done - -bcftools reheader \ - --samples $TMPDIR/samples.txt \ - $TMPDIR/tmp_nohs37d5.vcf \ -| bgzip -c >{snakemake.output.vcf} -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf}).tbi >$(basename {snakemake.output.vcf}).tbi.md5 -popd -""" -) diff --git a/snappy_wrappers/wrappers/snpeff/somatic/wrapper.py b/snappy_wrappers/wrappers/snpeff/somatic/wrapper.py deleted file mode 100644 index af5a48642..000000000 --- a/snappy_wrappers/wrappers/snpeff/somatic/wrapper.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running SnpEff variant annotation""" - -from snakemake.shell import shell - -__author__ = "Manuel Holtgrewe" -__email__ = "manuel.holtgrewe@bih-charite.de" - -normal_library = snakemake.params.args["normal_library"] -cancer_library = snakemake.params.args["cancer_library"] - -shell( - r""" -# ----------------------------------------------------------------------------- -# Redirect to log file by default and enable printing executed commands -exec 2> >(tee -a "{snakemake.log}") -set -x -# ----------------------------------------------------------------------------- - -module purge -module load Java/1.8.0_92 -module load HTSlib/1.3.1-foss-2015a - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -JAR=/fast/users/mholtgr/scratch/build/SnpEff4.3i/snpEff/snpEff.jar - -echo -e "{normal_library}\t{cancer_library}" > $TMPDIR/samples.txt - -java -Xmx15g -jar $JAR ann \ - -verbose \ - -cancer \ - -cancerSamples $TMPDIR/samples.txt \ - -csvStats {snakemake.output.report_csv} \ - -htmlStats {snakemake.output.report_html} \ - -nodownload \ - -dataDir $(dirname {snakemake.config[step_config][somatic_variant_annotation][snpeff][path_db]}) \ - $(basename {snakemake.config[step_config][somatic_variant_annotation][snpeff][path_db]}) \ - {snakemake.input.vcf} \ -> $TMPDIR/$(basename {snakemake.output.vcf} .gz) - -bgzip -c $TMPDIR/$(basename {snakemake.output.vcf} .gz) \ -> {snakemake.output.vcf} -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) && \ - md5sum $(basename {snakemake.output.vcf}) > $(basename {snakemake.output.vcf}).md5 && \ - md5sum $(basename {snakemake.output.vcf_tbi}) > $(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/somatic_cnv_checking/wrapper.py b/snappy_wrappers/wrappers/somatic_cnv_checking/wrapper.py index bf1277484..e53541bdd 100644 --- a/snappy_wrappers/wrappers/somatic_cnv_checking/wrapper.py +++ b/snappy_wrappers/wrappers/somatic_cnv_checking/wrapper.py @@ -11,7 +11,8 @@ rscript = os.path.join(os.path.dirname(os.path.realpath(__file__)), "cnv-check-plot.R") -reference = snakemake.config["static_data_config"]["reference"]["path"] +args = getattr(snakemake.params, "args", {}) +reference = args["reference"] shell( r""" diff --git a/snappy_wrappers/wrappers/somatic_variant_filtration/apply_filters/wrapper.py b/snappy_wrappers/wrappers/somatic_variant_filtration/apply_filters/wrapper.py index 740b6bdbd..07bf93a69 100644 --- a/snappy_wrappers/wrappers/somatic_variant_filtration/apply_filters/wrapper.py +++ b/snappy_wrappers/wrappers/somatic_variant_filtration/apply_filters/wrapper.py @@ -8,8 +8,8 @@ from snakemake import shell -config = snakemake.config["step_config"]["somatic_variant_filtration"]["filter_sets"] -params = snakemake.params.args +params = 
getattr(snakemake.params, "args", {}) +config = params.get("config", {}) print("DEBUG- params = {}".format(params), file=sys.stderr) __author__ = "Manuel Holtgrewe " diff --git a/snappy_wrappers/wrappers/somatic_variant_filtration/filter_to_exons/wrapper.py b/snappy_wrappers/wrappers/somatic_variant_filtration/filter_to_exons/wrapper.py index e1463b760..8affcb3dc 100644 --- a/snappy_wrappers/wrappers/somatic_variant_filtration/filter_to_exons/wrapper.py +++ b/snappy_wrappers/wrappers/somatic_variant_filtration/filter_to_exons/wrapper.py @@ -9,6 +9,8 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) + if snakemake.wildcards.exon_list == "genome_wide": shell( textwrap.dedent( @@ -21,9 +23,7 @@ ) ) else: - bed_path = snakemake.config["step_config"]["somatic_variant_filtration"]["exon_lists"][ - snakemake.wildcards.exon_list - ] + bed_path = args["exon_lists"][snakemake.wildcards.exon_list] shell( textwrap.dedent( r""" @@ -34,7 +34,7 @@ -b {bed_path} \ -wa \ -u \ - -g {snakemake.config[static_data_config][reference][path]}.genome \ + -g {args[reference]}.genome \ -sorted \ -header \ | bgzip -c \ diff --git a/snappy_wrappers/wrappers/star/wrapper.py b/snappy_wrappers/wrappers/star/wrapper.py index c62eaf9db..1893eb5da 100644 --- a/snappy_wrappers/wrappers/star/wrapper.py +++ b/snappy_wrappers/wrappers/star/wrapper.py @@ -2,6 +2,8 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) + out_gc = snakemake.output.get("gene_counts", "__dummy__") out_sj = snakemake.output.get("junctions", "__dummy__") out_tx = snakemake.output.get("transcriptome", "__dummy__") @@ -21,8 +23,8 @@ # Input fastqs are passed through snakemake.params. # snakemake.input is a .done file touched after linking files in. -reads_left = snakemake.params.args["input"]["reads_left"] -reads_right = snakemake.params.args["input"].get("reads_right", "") +reads_left = args["input"]["reads_left"] +reads_right = args["input"].get("reads_right", "") shell( r""" @@ -64,7 +66,7 @@ mkdir -p $TMPDIR/tmp.d $TMPDIR/pre.d # Define some global shortcuts -INDEX={snakemake.config[step_config][ngs_mapping][star][path_index]} +INDEX={args[path_index]} # Define left and right reads as Bash arrays declare -a reads_left=({reads_left}) @@ -87,8 +89,8 @@ set -x samtools sort -O BAM \ - -m {snakemake.config[step_config][ngs_mapping][star][memory_bam_sort]} \ - -@ {snakemake.config[step_config][ngs_mapping][star][num_threads_bam_sort]} \ + -m {args[memory_bam_sort]} \ + -@ {args[num_threads_bam_sort]} \ - }} @@ -133,42 +135,42 @@ fi trim_cmd="" - if [[ "{snakemake.config[step_config][ngs_mapping][star][trim_adapters]}" == "True" ]]; then - trim_cmd="\"trimadap-mt -p {snakemake.config[step_config][ngs_mapping][star][num_threads_trimming]}\"" + if [[ "{args[trim_adapters]}" == "True" ]]; then + trim_cmd="\"trimadap-mt -p {args[num_threads_trimming]}\"" else trim_cmd="zcat" fi quant_mode="" - if [[ -n "{snakemake.config[static_data_config][features][path]}" ]] + if [[ -n "{args[features]}" ]] then quant_mode="$quant_mode GeneCounts" fi - if [[ "{snakemake.config[step_config][ngs_mapping][star][transcriptome]}" = "True" ]] + if [[ "{args[transcriptome]}" = "True" ]] then quant_mode="$quant_mode TranscriptomeSAM" fi STAR \ --readFilesIn ${{left_files}} ${{right_files}} \ - {snakemake.config[step_config][ngs_mapping][star][raw_star_options]} \ + {args[raw_star_options]} \ $rg_args \ --readFilesCommand ${{trim_cmd}} \ - --alignIntronMax {snakemake.config[step_config][ngs_mapping][star][align_intron_max]} \ 
- --alignIntronMin {snakemake.config[step_config][ngs_mapping][star][align_intron_min]} \ - --alignMatesGapMax {snakemake.config[step_config][ngs_mapping][star][align_mates_gap_max]} \ - --alignSJDBoverhangMin {snakemake.config[step_config][ngs_mapping][star][align_sjdb_overhang_min]} \ - --alignSJoverhangMin {snakemake.config[step_config][ngs_mapping][star][align_sj_overhang_min]} \ - --genomeDir {snakemake.config[step_config][ngs_mapping][star][path_index]} \ - --genomeLoad {snakemake.config[step_config][ngs_mapping][star][genome_load]} \ + --alignIntronMax {args[align_intron_max]} \ + --alignIntronMin {args[align_intron_min]} \ + --alignMatesGapMax {args[align_mates_gap_max]} \ + --alignSJDBoverhangMin {args[align_sjdb_overhang_min]} \ + --alignSJoverhangMin {args[align_sj_overhang_min]} \ + --genomeDir {args[path_index]} \ + --genomeLoad {args[genome_load]} \ --outFileNamePrefix $TMPDIR/pre.d/out. \ - --outFilterIntronMotifs {snakemake.config[step_config][ngs_mapping][star][out_filter_intron_motifs]} \ - --outFilterMismatchNmax {snakemake.config[step_config][ngs_mapping][star][out_filter_mismatch_n_max]} \ - --outFilterMismatchNoverLmax {snakemake.config[step_config][ngs_mapping][star][out_filter_mismatch_n_over_l_max]} \ - --outFilterMultimapNmax {snakemake.config[step_config][ngs_mapping][star][out_filter_multimap_n_max]} \ - --outFilterType {snakemake.config[step_config][ngs_mapping][star][out_filter_type]} \ - --outSAMstrandField {snakemake.config[step_config][ngs_mapping][star][out_sam_strand_field]} \ - --outSAMunmapped $(if [[ "{snakemake.config[step_config][ngs_mapping][star][include_unmapped]}" == "True" ]]; then \ + --outFilterIntronMotifs {args[out_filter_intron_motifs]} \ + --outFilterMismatchNmax {args[out_filter_mismatch_n_max]} \ + --outFilterMismatchNoverLmax {args[out_filter_mismatch_n_over_l_max]} \ + --outFilterMultimapNmax {args[out_filter_multimap_n_max]} \ + --outFilterType {args[out_filter_type]} \ + --outSAMstrandField {args[out_sam_strand_field]} \ + --outSAMunmapped $(if [[ "{args[include_unmapped]}" == "True" ]]; then \ echo "Within"; \ else echo "None"; \ @@ -176,15 +178,15 @@ $(if [[ -n "$quant_mode" ]]; then \ echo "--quantMode $quant_mode" fi) \ - $(if [[ -n "{snakemake.config[static_data_config][features][path]}" ]]; then \ - echo --sjdbGTFfile "{snakemake.config[static_data_config][features][path]}" + $(if [[ -n "{args[features]}" ]]; then \ + echo --sjdbGTFfile "{args[features]}" fi) \ - $(if [[ "{snakemake.config[step_config][ngs_mapping][star][mask_duplicates]}" == "True" ]]; then \ + $(if [[ "{args[mask_duplicates]}" == "True" ]]; then \ echo " --outStd SAM " ; \ else echo " --outSAMtype BAM SortedByCoordinate "; \ fi) \ - --runThreadN {snakemake.config[step_config][ngs_mapping][star][num_threads_align]} + --runThreadN {args[num_threads_align]} >&2 ls -lhR $TMPDIR }} @@ -192,7 +194,7 @@ # Perform Alignment ------------------------------------------------------------------------------- # Run STAR -if [[ "{snakemake.config[step_config][ngs_mapping][star][mask_duplicates]}" == "True" ]]; then +if [[ "{args[mask_duplicates]}" == "True" ]]; then run_star | mask_duplicates | sort_by_coord > {snakemake.output.bam} else run_star @@ -208,8 +210,8 @@ # Optional output: mapping on transcriptome ------------------------------------------------------- -if [[ "{snakemake.config[step_config][ngs_mapping][star][transcriptome]}" = "True" ]]; then - if [[ "{snakemake.config[step_config][ngs_mapping][star][mask_duplicates]}" == "True" ]]; then +if [[ 
"{args[transcriptome]}" = "True" ]]; then + if [[ "{args[mask_duplicates]}" == "True" ]]; then samtools view -h -S $TMPDIR/pre.d/out.Aligned.toTranscriptome.out.bam | mask_duplicates | samtools view -h -b - > {out_tx} else mv $TMPDIR/pre.d/out.Aligned.toTranscriptome.out.bam {out_tx} diff --git a/snappy_wrappers/wrappers/star_fusion/wrapper.py b/snappy_wrappers/wrappers/star_fusion/wrapper.py index 32769d02f..cb20ed435 100644 --- a/snappy_wrappers/wrappers/star_fusion/wrapper.py +++ b/snappy_wrappers/wrappers/star_fusion/wrapper.py @@ -7,6 +7,8 @@ shell.executable("/bin/bash") +args = getattr(snakemake.params, "args", {}) + shell( r""" set -x @@ -32,10 +34,10 @@ mkdir -p $inputdir if [[ ! -f "$inputdir/reads_1.fastq.gz" ]]; then - cat {snakemake.params.args[left]} > $inputdir/reads_1.fastq.gz + cat {args[left]} > $inputdir/reads_1.fastq.gz fi if [[ ! -f "$inputdir/reads_2.fastq.gz" ]]; then - cat {snakemake.params.args[right]} > $inputdir/reads_2.fastq.gz + cat {args[right]} > $inputdir/reads_2.fastq.gz fi pushd $workdir @@ -43,7 +45,7 @@ mkdir -p output STAR-Fusion \ - --genome_lib_dir {snakemake.config[step_config][somatic_gene_fusion_calling][star_fusion][path_ctat_resource_lib]} \ + --genome_lib_dir {args[path_ctat_resource_lib]} \ --left_fq input/reads_1.fastq.gz \ --right_fq input/reads_2.fastq.gz \ --output_dir output diff --git a/snappy_wrappers/wrappers/strelka2/somatic/wrapper.py b/snappy_wrappers/wrappers/strelka2/somatic/wrapper.py index 448ac2ce2..0c0abdb94 100644 --- a/snappy_wrappers/wrappers/strelka2/somatic/wrapper.py +++ b/snappy_wrappers/wrappers/strelka2/somatic/wrapper.py @@ -7,6 +7,8 @@ this_file = __file__ +args = getattr(snakemake.params, "args", {}) + shell.executable("/bin/bash") shell( @@ -42,18 +44,18 @@ configManta.py \ --normalBam "{snakemake.input.normal_bam}" \ --tumorBam "{snakemake.input.tumor_bam}" \ - --referenceFasta "{snakemake.config[static_data_config][reference][path]}" \ + --referenceFasta "{args[reference]}" \ --runDir $TMPDIR/manta $TMPDIR/manta/runWorkflow.py -m local -j 8 # Use target bed file if present cmd="" -if [[ "X{snakemake.config[step_config][somatic_variant_calling][strelka2][path_target_regions]}" != "X" ]] +if [[ "X{args[path_target_regions]}" != "X" ]] then - if [[ -r "{snakemake.config[step_config][somatic_variant_calling][strelka2][path_target_regions]}" ]] + if [[ -r "{args[path_target_regions]}" ]] then - cmd=' --exome --callRegions "{snakemake.config[step_config][somatic_variant_calling][strelka2][path_target_regions]}" ' + cmd=' --exome --callRegions "{args[path_target_regions]}" ' fi fi @@ -61,7 +63,7 @@ configureStrelkaSomaticWorkflow.py \ --normalBam "{snakemake.input.normal_bam}" \ --tumorBam "{snakemake.input.tumor_bam}" \ - --referenceFasta "{snakemake.config[static_data_config][reference][path]}" \ + --referenceFasta "{args[reference]}" \ --indelCandidates "$TMPDIR/manta/results/variants/candidateSmallIndels.vcf.gz" \ $cmd --outputCallableRegions \ --runDir $TMPDIR/strelka diff --git a/snappy_wrappers/wrappers/svtk/environment.yaml b/snappy_wrappers/wrappers/svtk/environment.yaml deleted file mode 100644 index 42238c2e3..000000000 --- a/snappy_wrappers/wrappers/svtk/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - svtk==0.0.20190615 - - bcftools==1.19 - - htslib==1.19.1 diff --git a/snappy_wrappers/wrappers/svtk/standardize/environment.yaml b/snappy_wrappers/wrappers/svtk/standardize/environment.yaml deleted file mode 120000 index 
2e107ac86..000000000 --- a/snappy_wrappers/wrappers/svtk/standardize/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/svtk/standardize/wrapper.py b/snappy_wrappers/wrappers/svtk/standardize/wrapper.py deleted file mode 100644 index 5f44b2fa6..000000000 --- a/snappy_wrappers/wrappers/svtk/standardize/wrapper.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running ``svtk normalize``""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -shell( - r""" - caller=$(echo {snakemake.wildcards.caller} | sed -e 's/delly2/delly/g') - - export TMPDIR=$(mktemp -d) - trap "rm -rf $TMPDIR" EXIT - - bcftools view \ - -O z \ - -o $TMPDIR/calls.vcf.gz \ - {snakemake.input.calls} - - svtk standardize \ - $TMPDIR/calls.vcf.gz \ - $TMPDIR/tmp.vcf \ - $caller - - bcftools sort -O z -o {snakemake.output.vcf} $TMPDIR/tmp.vcf - - tabix -s 1 -b 2 -e 2 -f {snakemake.output.vcf} - pushd $(dirname {snakemake.output.vcf}) - md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 - md5sum $(basename {snakemake.output.vcf}).tbi >$(basename {snakemake.output.vcf}).tbi.md5 -""" -) diff --git a/snappy_wrappers/wrappers/varfish_annotator/annotate/wrapper.py b/snappy_wrappers/wrappers/varfish_annotator/annotate/wrapper.py index ea0e3571f..ea30e41b4 100644 --- a/snappy_wrappers/wrappers/varfish_annotator/annotate/wrapper.py +++ b/snappy_wrappers/wrappers/varfish_annotator/annotate/wrapper.py @@ -4,9 +4,8 @@ __author__ = "Manuel Holtgrewe " -# Get shortcut to configuration of varfish_export step -step_name = snakemake.params.args["step_name"] -export_config = snakemake.config["step_config"][step_name] +args = getattr(snakemake.params, "args", {}) +export_config = args["config"] DEF_HELPER_FUNCS = r""" compute-md5() diff --git a/snappy_wrappers/wrappers/varfish_annotator/annotate_svs/wrapper.py b/snappy_wrappers/wrappers/varfish_annotator/annotate_svs/wrapper.py index 0565841fb..710a0de55 100644 --- a/snappy_wrappers/wrappers/varfish_annotator/annotate_svs/wrapper.py +++ b/snappy_wrappers/wrappers/varfish_annotator/annotate_svs/wrapper.py @@ -7,9 +7,9 @@ # Optionally get path to coverage VCF file. coverage_vcf = " ".join(getattr(snakemake.input, "vcf_cov", [])) -# Get shortcut to configuration of varfish_export step -step_name = snakemake.params.args["step_name"] -export_config = snakemake.config["step_config"][step_name] +args = getattr(snakemake.params, "args", {}) +export_config = args["config"] + # Get shortcut to "fix_manta_invs.py" postprocessing script fix_manta_invs = os.path.join( os.path.dirname(__file__), diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_denovo/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_denovo/wrapper.py index 08d895ab6..1810c5a9e 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_denovo/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_denovo/wrapper.py @@ -13,9 +13,11 @@ __author__ = "Manuel Holtgrewe " +a = getattr(snakemake.params, "args", {}) + # Build arguments ================================================================================== -besenbacher = snakemake.config["step_config"]["variant_denovo_filtration"]["params_besenbacher"] +besenbacher = a["params_besenbacher"] # Define arguments in a dictionary, will convert to namedtuple below. 
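The comment above refers to the dictionary-to-namedtuple idiom used by this wrapper; before the actual dict that follows, here is a minimal, self-contained sketch of the same idiom. The field names and values are placeholders, not the wrapper's real Besenbacher parameters.

    import collections

    # Placeholder settings standing in for the defaults that the real wrapper
    # merges with the "params_besenbacher" entries from snakemake.params.args.
    args = {"min_gq": 40, "min_dp": 10, "phase_paternal_first": True}

    # Freeze the dict into a namedtuple so downstream code can use attribute
    # access (args_t.min_gq) instead of string keys.
    args_t = collections.namedtuple("Arguments", args.keys())(**args)
    assert args_t.min_gq == 40
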
args = { @@ -34,7 +36,7 @@ "phase_paternal_first": True, } # Bulk-add besenbacher parameters -args.update(snakemake.config["step_config"]["variant_denovo_filtration"]["params_besenbacher"]) +args.update(a["params_besenbacher"]) args_t = collections.namedtuple("Arguments", args.keys())(**args) diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_denovo_hard/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_denovo_hard/wrapper.py index c1926eeba..5489bb262 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_denovo_hard/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_denovo_hard/wrapper.py @@ -15,13 +15,14 @@ import os import sys +args = getattr(snakemake.params, "args", {}) region_filter = "" -if snakemake.config["step_config"]["variant_denovo_filtration"]["bad_region_expressions"]: +if args["bad_region_expressions"]: region_filter = " || " + " || ".join( map( lambda x: "({})".format(x), - snakemake.config["step_config"]["variant_denovo_filtration"]["bad_region_expressions"], + args["bad_region_expressions"], ) ).replace("$sample_index", snakemake.wildcards.index_library) @@ -31,8 +32,8 @@ set -euo pipefail samples="{snakemake.wildcards.index_library}" -samples+=",{snakemake.params.args[father]}" -samples+=",{snakemake.params.args[mother]}" +samples+=",{args[father]}" +samples+=",{args[mother]}" # Perform Hard-Filtration -------------------------------------------------------------------------- diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_frequency/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_frequency/wrapper.py index 86e1a7187..fdadcd569 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_frequency/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_frequency/wrapper.py @@ -16,8 +16,10 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Short-circuit in case of performing no filtration -if snakemake.wildcards.frequency == "freq_all": +if args["filter_mode"] == "freq_all": shell( r""" # Frequency set to "freq_all", just copy out the data. @@ -35,14 +37,14 @@ # Actual Filtration ------------------------------------------------------------------------------- # Get shortcut to frequencies set. -frequencies = snakemake.config["step_config"]["variant_filtration"]["frequencies"] +frequencies = args["filter_config"] shell( r""" set -x ## do we have to separate de novo and dom freq? I don't think so. 
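The branches that follow assemble bcftools `-i` expressions from the `frequencies` dict now taken from `args["filter_config"]`. As a quick plain-Python illustration of the dominant-frequency expression, with made-up thresholds (only the gnomAD field names and expression shape come from the wrapper):

    # Hypothetical thresholds; in the wrapper they come from args["filter_config"].
    frequencies = {"ac_dominant": 5, "af_dominant": 0.001}

    # Same shape as the bcftools -i expression built in the dominant_freq branch:
    # keep variants that are rare in gnomAD or absent from it altogether.
    include = (
        '((GNOMAD_GENOMES_AC_ALL <= {ac}) | (GNOMAD_GENOMES_AC_ALL = "."))'
        ' && ((GNOMAD_GENOMES_AF_POPMAX < {af}) | (GNOMAD_GENOMES_AF_POPMAX = "."))'
    ).format(ac=frequencies["ac_dominant"], af=frequencies["af_dominant"])

    print(include)
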
-if [[ "{snakemake.wildcards.frequency}" == dominant_freq ]]; then +if [[ "{args[filter_mode]}" == dominant_freq ]]; then # Perform filtration for de novo variants include='((GNOMAD_GENOMES_AC_ALL <= {frequencies[ac_dominant]}) | (GNOMAD_GENOMES_AC_ALL = "."))' include+=' && ((GNOMAD_GENOMES_AF_POPMAX < {frequencies[af_dominant]}) | (GNOMAD_GENOMES_AF_POPMAX = "."))' @@ -53,7 +55,7 @@ -O z \ -o {snakemake.output.vcf} \ {snakemake.input.vcf} -elif [[ "{snakemake.wildcards.frequency}" == recessive_freq ]]; then +elif [[ "{args[filter_mode]}" == recessive_freq ]]; then # Perform filtration for homozygous variants, heterozygous in both father and mother include='((GNOMAD_GENOMES_AF_ALL < {frequencies[af_recessive]}) | (GNOMAD_GENOMES_AF_ALL = "."))' include+=' && ((GNOMAD_GENOMES_AF_POPMAX < {frequencies[af_recessive]}) | (GNOMAD_GENOMES_AF_POPMAX = "."))' diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_het_comp/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_het_comp/wrapper.py index bae647c0c..1858637d5 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_het_comp/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_het_comp/wrapper.py @@ -16,8 +16,10 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Short-circuit in case of performing no filtration -if snakemake.wildcards.het_comp == "passthrough": +if args["filter_mode"] == "passthrough": shell( r""" # Het. comp. mode set to "passthrough", just copy out the data. @@ -44,7 +46,7 @@ source {base_dir}/../../wgs_sv_filtration/funcs.sh # Get name and number of index, father, and mother. -index={snakemake.wildcards.index_library} +index={args[index_library]} father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) @@ -84,25 +86,25 @@ ### Determine intervals to use for hetcomp criteria -if [[ "{snakemake.wildcards.het_comp}" == tads ]]; then - intervals_bed={snakemake.config[step_config][variant_filtration][region_beds][all_tads]} +if [[ "{args[filter_mode]}" == tads ]]; then + intervals_bed={args[filter_config][all_tads]} -elif [[ "{snakemake.wildcards.het_comp}" == intervals500 ]]; then +elif [[ "{args[filter_mode]}" == intervals500 ]]; then ### is it ok to use any parent? 
zcat $TMPDIR/par1.SNV.vcf.gz \ | {{ grep -v ^\# || true; }} \ | awk -F'\t' 'BEGIN {{ OFS = FS }} {{ left = 500; if (left > $2) {{ left = $2 }} print $1, $2 - left, $2 + 500 }}' \ > $TMPDIR/par1.SNV.bed intervals_bed=$TMPDIR/par1.SNV.bed -elif [[ "{snakemake.wildcards.het_comp}" == gene ]]; then - intervals_bed={snakemake.config[step_config][variant_filtration][region_beds][all_genes]} +elif [[ "{args[filter_mode]}" == gene ]]; then + intervals_bed={args[filter_config][all_genes]} fi echo $intervals_bed ### Add exonic + effect filter for ARHC in genes -if [[ "{snakemake.wildcards.het_comp}" == gene ]]; then +if [[ "{args[filter_mode]}" == gene ]]; then bcftools filter \ -i '(INFO/ANN ~ "MODERATE") || (INFO/ANN ~ "HIGH")' \ -O z \ diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_inheritance/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_inheritance/wrapper.py index 890e9a054..6cc735460 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_inheritance/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_inheritance/wrapper.py @@ -15,6 +15,8 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Actual Filtration ------------------------------------------------------------------------------- shell( @@ -25,7 +27,7 @@ source {base_dir}/../../wgs_sv_filtration/funcs.sh # Get name and number of index, father, and mother. -index={snakemake.wildcards.index_library} +index={args[index_library]} father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) @@ -39,7 +41,7 @@ exit 1 fi -if [[ "{snakemake.wildcards.inheritance}" == de_novo ]]; then +if [[ "{args[filter_mode]}" == de_novo ]]; then # Perform filtration for de novo variants # # Build the include filter string. @@ -60,7 +62,7 @@ -O z \ -o {snakemake.output.vcf} \ {snakemake.input.vcf} -elif [[ "{snakemake.wildcards.inheritance}" == recessive_hom ]]; then +elif [[ "{args[filter_mode]}" == recessive_hom ]]; then # Perform filtration for homozygous variants, heterozygous in both father and mother # # Build the include filter string. @@ -79,7 +81,7 @@ ## TODO: separate mother and father calls?? - easier to combine later on ## OR will be used in filtering step 6 --> then separate both? ## TODO: this is actually a dominant filtration filter. Should be renamed? -elif [[ "{snakemake.wildcards.inheritance}" == dominant ]]; then +elif [[ "{args[filter_mode]}" == dominant ]]; then # Perform filtration for de novo variants # # Build the inclusion filter string (left and rigt hand side separatdly). diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_quality/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_quality/wrapper.py index f715d3650..250d9286f 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_quality/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_quality/wrapper.py @@ -16,8 +16,10 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Short-circuit in case of performing no filtration -if snakemake.wildcards.thresholds == "no_filter": +if args["filter_mode"] == "no_filter": shell( r""" # Thresholds set to "no_filter", just link out the data. 
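The filtration wrappers in this part of the patch all follow the same pattern: the mode that used to be read from `snakemake.wildcards` is now passed in as `args["filter_mode"]`, together with a pre-resolved `args["filter_config"]`. A minimal sketch of how the workflow side might populate such an args dict for the quality filter; the function name, sample name, and exact keys are assumptions for illustration, not the actual snappy_pipeline step code:

    def get_args_for_quality_filter(wildcards, step_config):
        """Translate rule wildcards into the args dict the wrapper consumes."""
        return {
            "index_library": wildcards["index_library"],
            # e.g. "no_filter" or "conservative", taken from the wildcard
            "filter_mode": wildcards["thresholds"],
            # per-mode settings resolved once on the workflow side
            "filter_config": step_config["thresholds"],
        }

    args = get_args_for_quality_filter(
        {"index_library": "P001-N1-DNA1-WGS1", "thresholds": "conservative"},
        {"thresholds": {"conservative": {"min_gq": 40, "min_dp_het": 10, "min_dp_hom": 5}}},
    )
    # Mirrors the lookup done in filter_quality/wrapper.py below.
    thresholds = args["filter_config"][args["filter_mode"]]
    assert thresholds["min_gq"] == 40
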
@@ -33,9 +35,7 @@ # Actual Filtration ------------------------------------------------------------------------------- # Get shortcut to threshold set. -thresholds = snakemake.config["step_config"]["variant_filtration"]["thresholds"][ - snakemake.wildcards.thresholds -] +thresholds = args["filter_config"][args["filter_mode"]] # Get more filter expressions. if not thresholds.get("include_expressions"): @@ -55,13 +55,13 @@ source {base_dir}/../../wgs_sv_filtration/funcs.sh # Get name and number of index, father, and mother. -index={snakemake.wildcards.index_library} +index={args[index_library]} index_no=$(get_index {snakemake.input.vcf} "$index") # Perform the filtration. -if [[ "{snakemake.wildcards.thresholds}" == conservative ]]; then +if [[ "{args[filter_mode]}" == conservative ]]; then include="(GQ[$index_no] >= {thresholds[min_gq]})" include+=" && (" include+=" ((GT[$index_no] == \"het\") && (DP[$index_no] >= {thresholds[min_dp_het]})) " diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_regions/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_regions/wrapper.py index 16b065302..f67ca4128 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_regions/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_regions/wrapper.py @@ -16,8 +16,10 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Short-circuit in case of performing no filtration -if snakemake.wildcards.regions == "whole_genome": +if args["filter_mode"] == "whole_genome": shell( r""" # Regions set to "whole_genome", just link out the data. @@ -32,12 +34,10 @@ # Actual Filtration ------------------------------------------------------------------------------- -if snakemake.wildcards.regions == "whole_genome": +if args["filter_mode"] == "whole_genome": path_bed = "/dev/null" else: - path_bed = snakemake.config["step_config"]["variant_filtration"]["region_beds"][ - snakemake.wildcards.regions - ] + path_bed = args["filter_config"][args["filter_mode"]] shell( r""" @@ -46,7 +46,7 @@ # Load library with helper functions. source {base_dir}/../../wgs_sv_filtration/funcs.sh -if [[ "{snakemake.wildcards.regions}" != whole_genome ]]; then +if [[ "{args[filter_mode]}" != whole_genome ]]; then bedtools intersect -u -header -wa -a {snakemake.input.vcf} -b {path_bed} \ | bcftools norm --remove-duplicates \ | bcftools sort -o {snakemake.output.vcf} -O z diff --git a/snappy_wrappers/wrappers/variant_filtration/filter_scores/wrapper.py b/snappy_wrappers/wrappers/variant_filtration/filter_scores/wrapper.py index d1b804d7f..f538cc3cd 100644 --- a/snappy_wrappers/wrappers/variant_filtration/filter_scores/wrapper.py +++ b/snappy_wrappers/wrappers/variant_filtration/filter_scores/wrapper.py @@ -16,8 +16,10 @@ # Get path to this file's (wrapper.py) directory. base_dir = os.path.dirname(os.path.realpath(__file__)) +args = getattr(snakemake.params, "args", {}) + # Short-circuit in case of performing no filtration -if snakemake.wildcards.scores == "score_all": +if args["filter_mode"] == "score_all": shell( r""" # Scores set to "score_all", just link out the data. @@ -33,12 +35,10 @@ # Actual Filtration ------------------------------------------------------------------------------- # Get shortcut to scores set. 
-if snakemake.wildcards.scores == "all_scores": +if args["filter_mode"] == "all_scores": scores = {"require_coding": False, "require_gerpp_gt2": False, "min_cadd": None} else: - scores = snakemake.config["step_config"]["variant_filtration"]["score_thresholds"][ - snakemake.wildcards.scores - ] + scores = args["filter_config"][args["filter_mode"]] shell( r""" @@ -48,7 +48,7 @@ source {base_dir}/../../wgs_sv_filtration/funcs.sh # Get name and number of index, father, and mother. -index={snakemake.wildcards.index_library} +index={args[index_library]} father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) @@ -77,7 +77,7 @@ # Perform filtration (or copy file if all are passing). -if [[ "$filter" == "" ]] || [[ "{snakemake.wildcards.scores}" == all_scores ]]; then +if [[ "$filter" == "" ]] || [[ "{args[filter_mode]}" == all_scores ]]; then cp -L {snakemake.input.vcf} {snakemake.output.vcf} else bcftools view \ diff --git a/snappy_wrappers/wrappers/vcf2maf/vcf2maf/environment.yaml b/snappy_wrappers/wrappers/vcf2maf/vcf2maf/environment.yaml deleted file mode 100644 index 389c2e439..000000000 --- a/snappy_wrappers/wrappers/vcf2maf/vcf2maf/environment.yaml +++ /dev/null @@ -1,10 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - vcf2maf==1.6.18 - - ensembl-vep==100.4 - - samtools==1.9 - - htslib==1.9 - - bcftools==1.9 diff --git a/snappy_wrappers/wrappers/vcf2maf/vcf2maf/wrapper.py b/snappy_wrappers/wrappers/vcf2maf/vcf2maf/wrapper.py deleted file mode 100644 index fed545703..000000000 --- a/snappy_wrappers/wrappers/vcf2maf/vcf2maf/wrapper.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -"""Wrapper for running VCF2MAF incl VEP variant annotation""" - -import pprint - -from snakemake.shell import shell - -params = snakemake.params.args -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] -reference = snakemake.config["static_data_config"]["reference"]["path"] - - -pprint.pprint(params) - -shell( - r""" -set -x -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Also pipe stderr to log file -if [[ -n "{snakemake.log}" ]]; then - if [[ "$(set +e; tty; set -e)" != "" ]]; then - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - exec 2> >(tee -a "{snakemake.log}" >&2) - else - rm -f "{snakemake.log}" && mkdir -p $(dirname {snakemake.log}) - echo "No tty, logging disabled" >"{snakemake.log}" - fi -fi - -CONDAVEP=$(which vep) # Now using bioconda VEP instead of anaconda's (?), it used to be: $(which variant_effect_predictor.pl) -CONDAVEP=$(dirname $CONDAVEP) - -zcat -f {snakemake.input.vcf} > $TMPDIR/vcf.vcf - -# The cache version must be synchronised with the environment requirements for vep, and of course with the vep_data_path in the config -# WARNING- filter_vcf only works with file ExAC_nonTCGA.r0.3.1.sites.vep.vcf.gz (vcf2maf limitation) -vcf2maf.pl --input-vcf $TMPDIR/vcf.vcf \ - --output-maf {snakemake.output.maf} \ - --tmp-dir $TMPDIR \ - --tumor-id {params[tumor_id]} \ - --normal-id {params[normal_id]} \ - --vcf-tumor-id {params[tumor_sample]} \ - --vcf-normal-id {params[normal_sample]} \ - --vep-path $CONDAVEP \ - --vep-data {config[vep_data_path]} \ - --ref-fasta {reference} \ - --ncbi-build {config[ncbi_build]} \ - --cache-version {config[cache_version]} \ - --filter-vcf {config[filter_vcf]} # /fast/groups/cubi/projects/biotools/VEP/static_data/ExAC/ExAC_nonTCGA.r0.3.1.sites.vep.vcf.gz - -pushd 
$(dirname {snakemake.output.maf}) -md5sum $(basename {snakemake.output.maf}) > $(basename {snakemake.output.maf}).md5 -popd -""" -) diff --git a/snappy_wrappers/wrappers/vcf_cnv_filter/environment.yaml b/snappy_wrappers/wrappers/vcf_cnv_filter/environment.yaml deleted file mode 100644 index 622c15b3b..000000000 --- a/snappy_wrappers/wrappers/vcf_cnv_filter/environment.yaml +++ /dev/null @@ -1,10 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - python==3.6.15 - - vcfpy==0.13.3 - - pytabix==0.0.2 - - bcftools==1.9 - - htslib==1.9 diff --git a/snappy_wrappers/wrappers/vcf_cnv_filter/vcf_cnv_filter.py b/snappy_wrappers/wrappers/vcf_cnv_filter/vcf_cnv_filter.py deleted file mode 100755 index 51cfe23e8..000000000 --- a/snappy_wrappers/wrappers/vcf_cnv_filter/vcf_cnv_filter.py +++ /dev/null @@ -1,455 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -"""Apply soft filters to CNV VCF file. - -The following fields will be added to the ``INFO`` column: - -- ``AFFECTED_CARRIERS``: Number of affected samples (from pedigree) that - carry the variant -- ``UNAFFECTED_CARRIERS``: Number of unaffected samples (from pedigree) that - carry the variant -- ``BACKGROUND_CARRIERS``: Number of samples outside family that carry the - variant. - -- ``INHERITANCE``: Information on compatible inheritance modes, comma-separated - list of strings. As the genotyping of SVs is usually - problematic, the precise genotype is ignored, only carrier - and non-carriers are differentiated. - - ``DE_NOVO``: Individual has parents, individual carries variants and - parents do not. - - ``DOMINANT``: Individual has parents, individual carries variants and - exactly one parent carries the variant. -""" - -# TODO: consolidate with vcf_sv_filter. -# TODO: annotate with database overlap? -# XXX: affecteds can have no entry! - -import argparse -import functools -import itertools -import logging -import sys -import warnings - -import pysam -import vcfpy - -# White-listed chromosomes. 
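Before the chromosome whitelist and class definitions that follow, it is worth restating the inheritance rule from the module docstring above in compact form. This is an illustration only; the removed implementation works on vcfpy calls and additionally skips members whose parents are absent from the VCF.

    def compatible_modes(child_is_carrier, father_is_carrier, mother_is_carrier):
        """DE_NOVO / DOMINANT compatibility from carrier status alone."""
        modes = []
        # De novo: the child carries the variant, neither parent does.
        if child_is_carrier and not father_is_carrier and not mother_is_carrier:
            modes.append("DE_NOVO")
        # Dominant: the child carries the variant and exactly one parent does.
        if child_is_carrier and (father_is_carrier != mother_is_carrier):
            modes.append("DOMINANT")
        return modes

    assert compatible_modes(True, False, False) == ["DE_NOVO"]
    assert compatible_modes(True, True, False) == ["DOMINANT"]
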
-_CHROMS = tuple(itertools.chain(map(str, range(1, 23)), ("X", "Y"))) -CHROMS = tuple(itertools.chain(_CHROMS, ["chr" + c for c in _CHROMS])) - - -class PedigreeMember: - """Representation of one PED file line""" - - UNKNOWN = "0" - MALE = "1" - FEMALE = "2" - UNAFFECTED = "1" - AFFECTED = "2" - - @classmethod - def parse_line(klass, line): - arr = line.strip().split("\t") - return PedigreeMember(*arr[0:6], data=arr[:6]) - - def __init__(self, family, name, father, mother, gender, disease, data=[]): - self.family = family - self.name = name - self.father = father - self.mother = mother - self.gender = gender - self.disease = disease - self.data = list(data) - - def __str__(self): - return "Pedigree({})".format( - ", ".join( - map( - str, - [self.family, self.name, self.father, self.mother, self.gender, self.disease], - ) - ) - ) - - -class Pedigree: - """Representation of a pedigree""" - - @classmethod - def parse(klass, f): - """Parse from file-like object""" - members = [] - for line in f: - line = line.strip() - if not line: - continue # skip empty lines - members.append(PedigreeMember.parse_line(line)) - return Pedigree(members) - - def __init__(self, members=[]): - self.members = list(members) - self.by_name = {m.name: m for m in self.members} - - @property - @functools.lru_cache(maxsize=1) - def affecteds(self): - """Return list of affected individuals""" - return [m for m in self.members if m.disease == PedigreeMember.AFFECTED] - - @property - @functools.lru_cache(maxsize=1) - def affecteds_names(self): - """Return list of names of affected individuals""" - return [m.name for m in self.affecteds] - - -def full_chromosomes(reader): - """Return list of regions of all chromosomes of VCF reader.""" - for line in reader.header.get_lines("contig"): - if line.id in CHROMS: - name = line.id - length = line.length or 1_000_000_000 - yield "{}:{}-{}".format(name, 1, length) - - -class FilterStep: - """Base class for filter step.""" - - def __init__(self, owner, args, inner=None): - #: Owner App object - self.owner = owner - #: Command line arguments to pass in. - self.args = args - #: Inner filter to pass through, ``None`` if no inner filter. - self.inner = inner - #: Whether or not active by configuration. 
- self.active = False - #: Hook for implementing setup - self._setup() - - def apply(self, record): - if self.inner: - for proc_record in self.inner.apply(record): - yield from self._apply(proc_record) - else: - yield from self._apply(record) - - def _setup(self): - """Perform extended setup.""" - - def _apply(self, record): - """Actual implementation.""" - raise NotImplementedError("Implement me!") - - -class InheritanceAnnoFilterStep(FilterStep): - def _setup(self): - self.active = True - - def _apply(self, record): - for member in self.owner.pedigree.members: - if member in record.call_for_sample: - compatible = [] - if self._compatible_de_novo(record, member): - compatible.append("DE_NOVO") - if self._compatible_dominant(record, member): - compatible.append("DOMINANT") - if compatible: - record.add_format("INHERITANCE", []) - record.call_for_sample[member.name].data["INHERITANCE"] = compatible - yield record - - def _compatible_de_novo(self, record, member): - if member.father == "0" or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - not record.call_for_sample[member.father].gt_type, - not record.call_for_sample[member.mother].gt_type, - } - return parent_variants == set([False]) - - def _compatible_dominant(self, record, member): - if member.father == "0" or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - not record.call_for_sample[member.father].gt_type, - not record.call_for_sample[member.mother].gt_type, - } - return len(parent_variants) == 2 - - -class AnnotateCarrierCountsFilterStep(FilterStep): - def _setup(self): - self.active = True - self.tabixes = { - anno_args["info"]: pysam.TabixFile(anno_args["path"]) - for anno_args in self.args.annotation_beds - } - - def _apply(self, record): - for key, tabix in self.tabixes.items(): - cnv_start = record.affected_start - cnv_end = record.INFO.get("END", cnv_start) - genes = [] - try: - tbx_iter = tabix.fetch(record.CHROM, cnv_start, cnv_end) - except ValueError: - tbx_iter = [] - for line in tbx_iter: - arr = line.rstrip().split("\t") - start = int(arr[1]) - end = int(arr[2]) - if start < cnv_end and end > cnv_start: - genes.append(arr[3]) - record.INFO[key] = genes - yield record - - -class AnnotateOverlappingGenesFilter(FilterStep): - def _setup(self): - self.active = True - # Create shortcuts of samples in pedigree and samples in affected. - self.in_pedigree = set() - self.affected = set() - for member in self.owner.pedigree.members: - self.in_pedigree.add(member.name) - if member.disease == PedigreeMember.AFFECTED: - self.affected.add(member.name) - - def _apply(self, record): - # Count carriers. - ped_carriers = 0 - affected_carriers = 0 - all_carriers = 0 - for call in record: - if not call.is_variant: - continue # skip - all_carriers += 1 - if call.sample in self.in_pedigree: - ped_carriers += 1 - if call.sample in self.affected: - affected_carriers += 1 - # Write out counts - record.INFO["AFFECTED_CARRIERS"] = affected_carriers - record.INFO["UNAFFECTED_CARRIERS"] = ped_carriers - affected_carriers - record.INFO["BACKGROUND_CARRIERS"] = all_carriers - ped_carriers - yield record - - -#: The filters to apply. -FILTERS = ( - # Add annotation for compatibility with inheritance. - InheritanceAnnoFilterStep, - # Annotate with carrier counts. 
- AnnotateCarrierCountsFilterStep, - # Annotate with overlapping genes. - AnnotateOverlappingGenesFilter, -) - - -class VcfFilterApp: - """Container class for storing the application's state. - - We are using a class for the implementation in contrast to global - functions because this gives us a way to store the arguments etc. - implicitely without passing it as parameters. - """ - - def __init__(self, args, filters=FILTERS): - #: Command line arguments. - self.args = args - #: Pedigree to use. - self.pedigree = self._load_pedigree() - # Setup the logging. - self._setup_logging() - #: Filter steps to run. - self.filters = filters - #: Setup the filter chain. - self.filter_chain = self._build_filter_chain() - - def _load_pedigree(self): - logging.info("Loading pedigree file %s", self.args.ped_file) - with open(self.args.ped_file, "rt") as pedf: - return Pedigree.parse(pedf) - - def _setup_logging(self): - logging.basicConfig( - format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s", datefmt="%m-%d %H:%M" - ) - logger = logging.getLogger("") - if self.args.verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - - def _build_filter_chain(self): - """Build the filter chain.""" - result = None - for klass in self.filters: - tmp = klass(self, self.args, result) - logging.info("%s %s", klass, tmp.active) - if tmp.active: - result = tmp - return result or (lambda x: x) - - def _print_header(self): - logging.info("SV VCF filter") - logging.info("Arguments: %s", self.args) - - def _process_region(self, region, writer): - """Process a single region and write its result to the writer.""" - - def _augment_header(self, header): - """Augment header information""" - header = self._augment_filter(header) - header = self._augment_info(header) - header = self._augment_format(header) - return header - - def _augment_filter(self, header): - """Augment header for FILTER column""" - return header - - def _augment_info(self, header): - """Augment header for INFO column""" - # Information on carriers - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "AFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of affected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "UNAFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of unaffected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "BACKGROUND_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of background samples that are carriers"), - ] - ) - ) - for anno_args in self.args.annotation_beds: - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", anno_args["info"]), - ("Number", "."), - ("Type", "String"), - ("Description", anno_args["description"]), - ] - ) - ) - return header - - def _augment_format(self, header): - """Augment header for FORMAT column""" - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "INHERITANCE"), - ("Number", "."), - ("Type", "String"), - ("Description", "Compatible modes of inheritance"), - ] - ) - ) - return header - - def run(self): - self._print_header() - with vcfpy.Reader.from_path(self.args.input_vcf) as reader: - # If no regions are given, fall back to all chromosomes. - regions = list(self.args.regions or full_chromosomes(reader)) - # Extend header with new lines. 
- header = self._augment_header(reader.header) - # Open the VCF writer for writing and process each region. - with vcfpy.Writer.from_path(self.args.output_vcf, header) as writer: - for region in regions: - logging.info("Processing %s", region) - try: - records = reader.fetch(region) - except ValueError: - logging.warning("Could not fetch records for %s", region) - else: - for record in records: - for proc_record in self.filter_chain.apply(record): - if proc_record.REF == ".": # fix non-validating VCF - proc_record.REF = "N" - writer.write_record(proc_record) - - -def main(argv=None): - # Suppress warnings on missing "length" attribute of contig lines. - warnings.filterwarnings(action="once") - - parser = argparse.ArgumentParser(description="Targeted CNV call annotation tool") - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("General Options") - group.add_argument("-v", "--verbose", default=0, action="count") - - group = parser.add_argument_group("Input / Output Options") - group.add_argument("--input-vcf", required=True, help="input VCF file") - group.add_argument("--output-vcf", help="output VCF file", default="/dev/stdout") - group.add_argument("--ped-file", required=True, help="Path to PED file to use.") - group.add_argument( - "--region", - type=str, - required=False, - default=[], - action="append", - dest="regions", - nargs="+", - help=("region(s) to limit analysis to"), - ) - group.add_argument( - "--annotation-bed", - type=str, - required=False, - default=[], - action="append", - dest="annotation_beds", - help="Annotate overlaps with these regions", - ) - - args = parser.parse_args(argv) - - for i, xs in enumerate(args.annotation_beds): - keys = ("info", "description", "path") - arr = xs.split("|") - assert len(arr) == 3 - args.annotation_beds[i] = dict(zip(keys, arr)) - - args.regions = [r for lst in args.regions for r in lst] - return VcfFilterApp(args).run() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/snappy_wrappers/wrappers/vcf_cnv_filter/wrapper.py b/snappy_wrappers/wrappers/vcf_cnv_filter/wrapper.py deleted file mode 100644 index 19bc3c62b..000000000 --- a/snappy_wrappers/wrappers/vcf_cnv_filter/wrapper.py +++ /dev/null @@ -1,122 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for soft-annotating CNV VCF files - -- Add annotations based on background. -""" - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -# Get path to this file's (wrapper.py) directory. -base_dir = os.path.dirname(os.path.realpath(__file__)) - -bed_files = [] -for bed_file in snakemake.config["step_config"][snakemake.params.step_name].get("bed_files", []): - bed_files.append( - "{}|{}|{}".format(bed_file["name"], bed_file["description"], bed_file["path"]).replace( - " ", "_" - ) - ) -bed_files = " ".join(map(lambda x: repr(x), bed_files)) - -# TODO: implement support for more than trios - -shell( - r""" -set -x - -## Write out information about conda installation. 
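One detail of this removed pair of files that is easy to miss: the wrapper packed each configured BED annotation into a single "name|description|path" string on the command line, and vcf_cnv_filter.py unpacked it into a dict keyed as info/description/path. A tiny round-trip sketch (the BED name, description, and path are made up):

    bed_file = {"name": "dgv", "description": "DGV overlaps", "path": "/path/to/dgv.bed.gz"}

    # Wrapper side: encode as one pipe-separated token, with spaces replaced.
    encoded = "{}|{}|{}".format(
        bed_file["name"], bed_file["description"], bed_file["path"]
    ).replace(" ", "_")

    # Script side (--annotation-bed parsing): split back into a dict; note that
    # the "name" field ends up under the "info" key, i.e. the INFO tag to write.
    keys = ("info", "description", "path")
    decoded = dict(zip(keys, encoded.split("|")))
    assert decoded == {"info": "dgv", "description": "DGV_overlaps", "path": "/path/to/dgv.bed.gz"}
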
-#conda list >{{snakemake.log.conda_list}} -#conda info >{{snakemake.log.conda_info}} -#md5sum {{snakemake.log.conda_list}} >{{snakemake.log.conda_list_md5}} -#md5sum {{snakemake.log.conda_info}} >{{snakemake.log.conda_info_md5}} -# -## Also pipe stderr to log file -#if [[ -n "{{snakemake.log.log}}" ]]; then -# if [[ "$(set +e; tty; set -e)" != "" ]]; then -# rm -f "{{snakemake.log.log}}" && mkdir -p $(dirname {{snakemake.log.log}}) -# exec 2> >(tee -a "{{snakemake.log.log}}" >&2) -# else -# rm -f "{{snakemake.log.log}}" && mkdir -p $(dirname {{snakemake.log.log}}) -# echo "No tty, logging disabled" >"{{snakemake.log.log}}" -# fi -#fi - -export TMPDIR=$(mktemp -d) -# trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../wgs_sv_filtration/funcs.sh - -# Get name and number of index, father, and mother. -index={snakemake.wildcards.index_ngs_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -include="(GT[$index_no] == \"alt\")" -test -n "$father_no" && include+="|| (GT[$father_no] == \"alt\")" -test -n "$mother_no" && include+="|| (GT[$mother_no] == \"alt\")" - -# Limit to variants present in family. -bcftools view \ - --threads 4 \ - --force-samples \ - -i "$include" \ - {snakemake.input.vcf} \ -| bgzip -c \ -> $TMPDIR/tmp.vcf.gz - -tabix -f $TMPDIR/tmp.vcf.gz - -# Run through VCF SV filter and limit to samples in family. - -set +e -samples=$( - samples_vcf_ped {snakemake.input.vcf} {snakemake.input.ped} \ - | tr '\n' ',' \ - | sed -e 's/,$//' -) -set -e - -if [[ -n "$samples" ]]; then - time python3 {base_dir}/vcf_cnv_filter.py \ - --ped-file {snakemake.input.ped} \ - --input-vcf $TMPDIR/tmp.vcf.gz \ - $(for bed_file in {bed_files}; do \ - echo --annotation-bed $bed_file - done) \ - | bcftools view \ - -s "$samples" \ - -O z \ - -o {snakemake.output.vcf} -else - bcftools view -s "$samples" --force-samples $TMPDIR/tmp.vcf.gz \ - | grep '^#' \ - > $TMPDIR/tmp2.vcf - - echo '##FORMAT=' \ - > $TMPDIR/header.txt - - bcftools annotate \ - -h $TMPDIR/header.txt \ - -Oz \ - -o {snakemake.output.vcf} \ - $TMPDIR/tmp2.vcf -fi - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/vcf_mei_filter/environment.yaml b/snappy_wrappers/wrappers/vcf_mei_filter/environment.yaml deleted file mode 100644 index 160ed610d..000000000 --- a/snappy_wrappers/wrappers/vcf_mei_filter/environment.yaml +++ /dev/null @@ -1,10 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - python==3.6.15 - - vcfpy==0.11.0 - - pytabix==0.0.2 - - bcftools==1.8 - - htslib==1.8 diff --git a/snappy_wrappers/wrappers/vcf_mei_filter/vcf_mei_filter.py b/snappy_wrappers/wrappers/vcf_mei_filter/vcf_mei_filter.py deleted file mode 100755 index 3637ccf21..000000000 --- a/snappy_wrappers/wrappers/vcf_mei_filter/vcf_mei_filter.py +++ /dev/null @@ -1,425 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -"""Apply soft filters to MEI VCF file. 
- -The following fields will be added to the ``INFO`` column: - -- ``AFFECTED_CARRIERS``: Number of affected samples (from pedigree) that - carry the variant -- ``UNAFFECTED_CARRIERS``: Number of unaffected samples (from pedigree) that - carry the variant -- ``BACKGROUND_CARRIERS``: Number of samples outside family that carry the - variant. - -- ``INHERITANCE``: Information on compatible inheritance modes, comma-separated - list of strings. As the genotyping of SVs is usually - problematic, the precise genotype is ignored, only carrier - and non-carriers are differentiated. - - ``DE_NOVO``: Individual has parents, individual carries variants and - parents do not. - - ``DOMINANT``: Individual has parents, individual carries variants and - exactly one parent carries the variant. -""" - -# TODO: consolidate with vcf_sv_filter. -# TODO: annotate with database overlap? -# XXX: affecteds can have no entry! - -import argparse -import functools -import itertools -import logging -import sys - -import vcfpy - -# White-listed chromosomes. -_CHROMS = tuple(itertools.chain(map(str, range(1, 23)), ("X", "Y"))) -CHROMS = tuple(itertools.chain(_CHROMS, ["chr" + c for c in _CHROMS])) - - -class PedigreeMember: - """Representation of one PED file line""" - - UNKNOWN = "0" - MALE = "1" - FEMALE = "2" - UNAFFECTED = "1" - AFFECTED = "2" - - @classmethod - def parse_line(klass, line): - arr = line.strip().split("\t") - return PedigreeMember(*arr[0:6], data=arr[:6]) - - def __init__(self, family, name, father, mother, gender, disease, data=[]): - self.family = family - self.name = name - self.father = father - self.mother = mother - self.gender = gender - self.disease = disease - self.data = list(data) - - def __str__(self): - return "Pedigree({})".format( - ", ".join( - map( - str, - [self.family, self.name, self.father, self.mother, self.gender, self.disease], - ) - ) - ) - - -class Pedigree: - """Representation of a pedigree""" - - @classmethod - def parse(klass, f): - """Parse from file-like object""" - members = [] - for line in f: - line = line.strip() - if not line: - continue # skip empty lines - members.append(PedigreeMember.parse_line(line)) - return Pedigree(members) - - def __init__(self, members=[]): - self.members = list(members) - self.by_name = {m.name: m for m in self.members} - - @property - @functools.lru_cache(maxsize=1) - def affecteds(self): - """Return list of affected individuals""" - return [m for m in self.members if m.disease == PedigreeMember.AFFECTED] - - @property - @functools.lru_cache(maxsize=1) - def affecteds_names(self): - """Return list of names of affected individuals""" - return [m.name for m in self.affecteds] - - -def full_chromosomes(reader): - """Return list of regions of all chromosomes of VCF reader.""" - for line in reader.header.get_lines("contig"): - if line.id in CHROMS: - name = line.id - length = line.length - yield "{}:{}-{}".format(name, 1, length) - - -class FilterStep: - """Base class for filter step.""" - - def __init__(self, owner, args, inner=None): - #: Owner App object - self.owner = owner - #: Command line arguments to pass in. - self.args = args - #: Inner filter to pass through, ``None`` if no inner filter. - self.inner = inner - #: Whether or not active by configuration. 
- self.active = False - #: Hook for implementing setup - self._setup() - - def apply(self, record): - if self.inner: - record = self.inner.apply(record) - return self._apply(record) - - def _setup(self): - """Perform extended setup.""" - - def _apply(self, record): - """Actual implementation.""" - raise NotImplementedError("Implement me!") - - -class GenotypeQualityAnnoFilterStep(FilterStep): - """Annotate records with genotype qualities.""" - - def _setup(self): - self.active = True - - def _apply(self, record): - record.add_format("LR") - for call in record.calls: - call.data["LR"] = self._compute_lr(call.data.get("GL")) - return record - - def _compute_lr(self, gl): - if not gl or len(gl) < 3: - return None - else: - r0 = gl[0] - gl[1] # 0/1 - r1 = gl[0] - gl[2] # 1/1 - return int(round(-1.0 * min(r0, r1))) - - -class InheritanceAnnoFilterStep(FilterStep): - def _setup(self): - self.active = True - - def _apply(self, record): - for member in self.owner.pedigree.members: - compatible = [] - if self._compatible_de_novo(record, member): - compatible.append("DE_NOVO") - if self._compatible_dominant(record, member): - compatible.append("DOMINANT") - if compatible: - record.add_format("INHERITANCE", []) - record.call_for_sample[member.name].data["INHERITANCE"] = compatible - return record - - def _compatible_de_novo(self, record, member): - if member.father == "0" or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - record.call_for_sample[member.father].is_variant, - record.call_for_sample[member.mother].is_variant, - } - return parent_variants == set([False]) - - def _compatible_dominant(self, record, member): - if member.father == "0" or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - record.call_for_sample[member.father].is_variant, - record.call_for_sample[member.mother].is_variant, - } - return len(parent_variants) == 2 - - -class AnnotateCarrierCountsFilterStep(FilterStep): - def _setup(self): - self.active = True - # Create shortcuts of samples in pedigree and samples in affected. - self.in_pedigree = set() - self.affected = set() - for member in self.owner.pedigree.members: - self.in_pedigree.add(member.name) - if member.disease == PedigreeMember.AFFECTED: - self.affected.add(member.name) - - def _apply(self, record): - # Count carriers. - ped_carriers = 0 - affected_carriers = 0 - all_carriers = 0 - for call in record: - if not call.is_variant: - continue # skip - all_carriers += 1 - if call.sample in self.in_pedigree: - ped_carriers += 1 - if call.sample in self.affected: - affected_carriers += 1 - # Write out counts - record.INFO["AFFECTED_CARRIERS"] = affected_carriers - record.INFO["UNAFFECTED_CARRIERS"] = ped_carriers - affected_carriers - record.INFO["BACKGROUND_CARRIERS"] = all_carriers - ped_carriers - return record - - -#: The filters to apply. -FILTERS = ( - # Add genotype quality column. - GenotypeQualityAnnoFilterStep, - # Add annotation for compatibility with inheritance. - InheritanceAnnoFilterStep, - # Annotate with carrier counts. - AnnotateCarrierCountsFilterStep, -) - - -class VcfFilterApp: - """Container class for storing the application's state. - - We are using a class for the implementation in contrast to global - functions because this gives us a way to store the arguments etc. 
- implicitely without passing it as parameters. - """ - - def __init__(self, args, filters=FILTERS): - #: Command line arguments. - self.args = args - #: Pedigree to use. - self.pedigree = self._load_pedigree() - # Setup the logging. - self._setup_logging() - #: Filter steps to run. - self.filters = filters - #: Setup the filter chain. - self.filter_chain = self._build_filter_chain() - - def _load_pedigree(self): - logging.info("Loading pedigree file %s", self.args.ped_file) - with open(self.args.ped_file, "rt") as pedf: - return Pedigree.parse(pedf) - - def _setup_logging(self): - logging.basicConfig( - format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s", datefmt="%m-%d %H:%M" - ) - logger = logging.getLogger("") - if self.args.verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - - def _build_filter_chain(self): - """Build the filter chain.""" - result = None - for klass in self.filters: - tmp = klass(self, self.args, result) - logging.info("%s %s", klass, tmp.active) - if tmp.active: - result = tmp - return result or (lambda x: x) - - def _print_header(self): - logging.info("SV VCF filter") - logging.info("Arguments: %s", self.args) - - def _process_region(self, region, writer): - """Process a single region and write its result to the writer.""" - - def _augment_header(self, header): - """Augment header information""" - header = self._augment_filter(header) - header = self._augment_info(header) - header = self._augment_format(header) - return header - - def _augment_filter(self, header): - """Augment header for FILTER column""" - return header - - def _augment_info(self, header): - """Augment header for INFO column""" - # Information on carriers - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "AFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of affected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "UNAFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of unaffected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "BACKGROUND_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of background samples that are carriers"), - ] - ) - ) - return header - - def _augment_format(self, header): - """Augment header for FORMAT column""" - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "LR"), - ("Number", "1"), - ("Type", "Integer"), - ( - "Description", - "Negative, log-10-scaled variant likelihood ratio (hom. over any var.)", - ), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "INHERITANCE"), - ("Number", "."), - ("Type", "String"), - ("Description", "Compatible modes of inheritance"), - ] - ) - ) - return header - - def run(self): - self._print_header() - with vcfpy.Reader.from_path(self.args.input_vcf) as reader: - # If no regions are given, fall back to all chromosomes. - regions = self.args.regions or full_chromosomes(reader) - # Extend header with new lines. - header = self._augment_header(reader.header) - # Open the VCF writer for writing and process each region. 
- with vcfpy.Writer.from_path(self.args.output_vcf, header) as writer: - for region in regions: - logging.info("Processing %s", region) - try: - records = reader.fetch(region) - except ValueError: - records = [] - logging.warning("Could not fetch records for %s", region) - for record in records: - record = self.filter_chain.apply(record) - writer.write_record(record) - - -def main(argv=None): - parser = argparse.ArgumentParser(description="(MELT) MEI VCF soft-filter application tool") - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("General Options") - group.add_argument("-v", "--verbose", default=0, action="count") - - group = parser.add_argument_group("Input / Output Options") - group.add_argument("--input-vcf", required=True, help="input VCF file") - group.add_argument("--output-vcf", help="output VCF file", default="/dev/stdout") - group.add_argument("--ped-file", required=True, help="Path to PED file to use.") - group.add_argument( - "--region", - type=str, - required=False, - default=[], - action="append", - dest="regions", - nargs="+", - help=("region(s) to limit analysis to"), - ) - - args = parser.parse_args(argv) - args.regions = [r for lst in args.regions for r in lst] - return VcfFilterApp(args).run() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/snappy_wrappers/wrappers/vcf_mei_filter/wrapper.py b/snappy_wrappers/wrappers/vcf_mei_filter/wrapper.py deleted file mode 100644 index c2d0cae43..000000000 --- a/snappy_wrappers/wrappers/vcf_mei_filter/wrapper.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for soft-annotating MEI VCF files - -- Add annotations based on background. -""" - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -# Get path to this file's (wrapper.py) directory. -base_dir = os.path.dirname(os.path.realpath(__file__)) - -# TODO: implement support for more than trios - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../wgs_sv_filtration/funcs.sh - -# Get name and number of index, father, and mother. -index={snakemake.wildcards.index_ngs_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -include="(GT[$index_no] ~ \"1\")" -test -n "$father_no" && include+="|| (GT[$father_no] ~ \"1\")" -test -n "$mother_no" && include+="|| (GT[$mother_no] ~ \"1\")" - -# Limit to variants present in family. -bcftools view \ - --threads 4 \ - --force-samples \ - -i "$include" \ - {snakemake.input.vcf} \ -| bgzip -c \ -> $TMPDIR/tmp.vcf.gz - -tabix -f $TMPDIR/tmp.vcf.gz - -# Run through VCF SV filter and limit to samples in family. 
- -time python3 {base_dir}/vcf_mei_filter.py \ - --ped-file {snakemake.input.ped} \ - --input-vcf $TMPDIR/tmp.vcf.gz \ -| bcftools view \ - -s "$( - samples_vcf_ped {snakemake.input.vcf} {snakemake.input.ped} \ - | tr '\n' ',' \ - | sed -e 's/,$//' - )" \ - -O z \ - -o {snakemake.output.vcf} - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/vcf_sv_filter/environment.yaml b/snappy_wrappers/wrappers/vcf_sv_filter/environment.yaml deleted file mode 100644 index 1c21eb669..000000000 --- a/snappy_wrappers/wrappers/vcf_sv_filter/environment.yaml +++ /dev/null @@ -1,12 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - vcfpy==0.11.1 - - pytabix==0.0.2 - - intervaltree==2.1.0 - - parallel==20240122 - - bcftools==1.11 - - htslib==1.11 - - samtools==1.11 diff --git a/snappy_wrappers/wrappers/vcf_sv_filter/vcf_sv_filter.py b/snappy_wrappers/wrappers/vcf_sv_filter/vcf_sv_filter.py deleted file mode 100755 index cdde5dd10..000000000 --- a/snappy_wrappers/wrappers/vcf_sv_filter/vcf_sv_filter.py +++ /dev/null @@ -1,1305 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -"""Apply soft filters to SV caller file. - -Currently supported callers are Delly2 and PopDel. - -The following values will be added to the ``FILTER`` columns: - -- ``ALU_OVL``: variant overlaps with ALU element -- ``DB_OVL``: variant overlaps with a variant from database - -The following fields will be added to the ``INFO`` column: - -- ``SIZE_CLASS``: size class of the SV -- ``BEST_DB_JACCARD``: Jaccard value for best overlap with DB -- ``BEST_ALU_JACCARD``: Jaccad value for the best overlap ALU DB -- ``AFFECTED_CARRIERS``: Number of affected samples (from pedigree) that - carry the variant -- ``UNAFFECTED_CARRIERS``: Number of unaffected samples (from pedigree) that - carry the variant -- ``BACKGROUND_CARRIERS``: Number of samples outside family that carry the - variant. - -The following fields will be added to the ``FORMAT`` column/genotypes: - -- ``FT``: list of strings with filter values for the genotype: - - ``HET_SNVS``: too may heterozygous SNVs in deletion (only applicable - to deletions) - - ``MIN_PE_COUNT``: does not pass minimal paired-end support count filter - - ``MIN_PE_AAF``: does not pass minimal paired-end support AAF filter - - ``MIN_SR_COUNT``: does not pass minimal split read support count filter - - ``MIN_SR_AAF``: does not pass minimal split read support AAF filter -- ``PE_AAF``: alternate allele support in fraction of paired-end read pairs -- ``PE_COUNT``: alternate allele support in number of paired-end read pairs -- ``SR_AAF``: alternate allele support in fraction of split reads -- ``SR_COUNT``: alternate allele support in number of split reads -- ``HET_SNVS``: number of heterozygous SNVs in deletion (only applicable - to deletions) -- ``INHERITANCE``: Information on compatible inheritance modes, comma-separated - list of strings. As the genotyping of SVs is usually - problematic, the precise genotype is ignored, only carrier - and non-carriers are differentiated. - - ``DE_NOVO``: Individual has parents, individual carries variants and - parents do not. - - ``DOMINANT``: Individual has parents, individual carries variants and - exactly one parent carries the variant. -""" - -# XXX: affecteds can have no entry! 
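The INHERITANCE annotation described in the docstring above reduces to a carrier/non-carrier comparison between an individual and its two parents. Below is a minimal, self-contained sketch of that rule as stated in the docstring; the function and argument names are illustrative only and condense the description, not the exact FilterStep implementation that follows in this file.

def compatible_modes(carries, father_carries, mother_carries):
    # Decide which inheritance modes are compatible with the observed
    # carrier pattern; precise genotypes are deliberately ignored.
    if not carries or father_carries is None or mother_carries is None:
        return []  # individual is not a carrier, or a parent is missing
    modes = []
    if not father_carries and not mother_carries:
        modes.append("DE_NOVO")   # carrier child, both parents non-carriers
    if father_carries != mother_carries:
        modes.append("DOMINANT")  # carrier child, exactly one carrier parent
    return modes

# Carrier child with only a carrier mother is compatible with DOMINANT:
assert compatible_modes(True, father_carries=False, mother_carries=True) == ["DOMINANT"]
# Carrier child with two non-carrier parents is compatible with DE_NOVO:
assert compatible_modes(True, father_carries=False, mother_carries=False) == ["DE_NOVO"]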
- -import argparse -import functools -import itertools -import logging -import re -import sys - -import tabix -import vcfpy - -# The tools supported by the filter. -TOOLS = ("delly", "manta") -# The size classes. -SIZE_CLASSES = ("SMALL", "MEDIUM", "LARGE") -# The SV types that are considered. -SV_TYPES = ("DEL", "DUP", "INV", "INS", "BND") -# White-listed chromosomes. -_CHROMS = tuple(itertools.chain(map(str, range(1, 23)), ("X", "Y"))) -CHROMS = tuple(itertools.chain(_CHROMS, ["chr" + c for c in _CHROMS])) - - -def append_unique(lst, elem): - """Append element to FILTER list if not present. - - If present, ``"PASS"`` will be removed. - """ - # Remove PASS if present. - if "PASS" in lst: - lst.remove("PASS") - # Add new element. - if elem not in lst: - lst.append(elem) - - -class GenomeRegion: - """Genome region with half-open intervals""" - - @staticmethod - def from_bed_line(line): - """Return GenomeRegion from BED line""" - chrom, begin, end = line.strip().split("\t") - return GenomeRegion(chrom, int(begin), int(end)) - - @staticmethod - def parse(user_readable): - user_readable = user_readable.replace(",", "") - m = re.match("^(?P.*?):(?P[0-9]+)-(?P[0-9]+)$", user_readable) - if not m: - raise ValueError("Invalid region string: {}".format(user_readable)) - else: - return GenomeRegion(m.group("chrom"), int(m.group("start")), int(m.group("end"))) - - def __init__(self, chrom, begin, end): - self.chrom = chrom - self.begin = begin - self.end = end - - def as_bed(self): - """Return BED reprentation (half-open intervals)""" - tpl = "{}\t{}\t{}" - return tpl.format(self.chrom, self.begin, self.end) - - def human_readable(self): - """Return human readable string""" - tpl = "{}:{:,}-{:,}" - return tpl.format(self.chrom, self.begin + 1, self.end) - - @property - def length(self): - """Return length""" - return self.end - self.begin - - def __str__(self): - tpl = "GenomeRegion({}, {}, {})" - return tpl.format(*list(map(repr, [self.chrom, self.begin, self.end]))) - - def __repr__(self): - return str(self) - - def overlaps(self, other): - """Return whether the region overlas with ``other``""" - # TODO: write test for me - if self.chrom != other.chrom: - return False - if self.begin < other.end and other.begin < self.end: - return True - else: - return False - - def extend(self, by): - return GenomeRegion(self.chrom, max(0, self.begin - by), self.end + by) - - def jaccard(self, other): - """Return jaccard index between ``self`` and ``other``""" - if self.chrom != other.chrom: - return 0 - pos_begin = max(self.begin, other.begin) - pos_end = min(self.end, other.end) - if pos_end >= pos_begin: - overlap = pos_end - pos_begin - else: - overlap = 0 - union = self.length + other.length - overlap - return overlap / union - - def __eq__(self, other): - if self is other: - return True - else: - return (self.chrom, self.begin, self.end) == (other.chrom, other.begin, other.end) - - -class PedigreeMember: - """Representation of one PED file line""" - - UNKNOWN = "0" - MALE = "1" - FEMALE = "2" - UNAFFECTED = "1" - AFFECTED = "2" - - @classmethod - def parse_line(klass, line): - arr = line.strip().split("\t") - return PedigreeMember(*arr[0:6], data=arr[:6]) - - def __init__(self, family, name, father, mother, gender, disease, data=[]): - self.family = family - self.name = name - self.father = father - self.mother = mother - self.gender = gender - self.disease = disease - self.data = list(data) - - def __str__(self): - return "Pedigree({})".format( - ", ".join( - map( - str, - [self.family, self.name, 
self.father, self.mother, self.gender, self.disease], - ) - ) - ) - - -class Pedigree: - """Representation of a pedigree""" - - @classmethod - def parse(klass, f): - """Parse from file-like object""" - members = [] - for line in f: - line = line.strip() - if not line: - continue # skip empty lines - members.append(PedigreeMember.parse_line(line)) - return Pedigree(members) - - def __init__(self, members=[]): - self.members = list(members) - self.by_name = {m.name: m for m in self.members} - - @property - @functools.lru_cache(maxsize=1) - def affecteds(self): - """Return list of affected individuals""" - return [m for m in self.members if m.disease == PedigreeMember.AFFECTED] - - @property - @functools.lru_cache(maxsize=1) - def affecteds_names(self): - """Return list of names of affected individuals""" - return [m.name for m in self.affecteds] - - -def full_chromosomes(reader): - """Return list of regions of all chromosomes of VCF reader.""" - for line in reader.header.get_lines("contig"): - if line.id in CHROMS: - name = line.id - length = line.length - yield "{}:{}-{}".format(name, 1, length) - - -class GenotypeMetrics: - """Unified description of a variant's metrics - - Required as the variant callers use different FORMAT entries and different - conditions for including them. - """ - - def __init__(self): - #: split reads supporting wild-type - self.wt_split_reads = None - #: paired reads supporting wild-type - self.wt_paired_reads = None - #: split reads supporting alternative allele - self.alt_split_reads = None - #: paired reads supporting alternative allele - self.alt_paired_reads = None - - def sr_aaf(self): - """Return alternative allele fraction for SR - - Return None if there is not enough information. - """ - if self.alt_split_reads is None or self.wt_split_reads is None: - return None - if self.alt_split_reads + self.wt_split_reads == 0: - return 0 - else: - return self.alt_split_reads / (self.alt_split_reads + self.wt_split_reads) - - def pe_aaf(self): - """Return alternative allele fraction for PR - - Return None if there is not enough information. - """ - if self.alt_paired_reads is None or self.wt_paired_reads is None: - return None - if self.alt_paired_reads + self.wt_paired_reads == 0: - return 0 - else: - return self.alt_paired_reads / (self.alt_paired_reads + self.wt_paired_reads) - - -class GenotypeMetricsBuilder: - """Base class for helper classes that generate ``GenotypeMetrics`` - objects from calls in VCF files - """ - - def build_call_metrics(self, record): - raise NotImplementedError("Write me!") - - def get_length(self, record): - """Return length of the the variant - - Return `None` for non-linear changes. - """ - raise NotImplementedError("Write me!") - - def get_inner_region(self, record): - """Return GenomeRegion inside the CI - - In the case of an empty region, the approximate positions 10% are - removed from each side. None if empty. 
- """ - raise NotImplementedError("Write me!") - - -class DellyGenotypeMetricsBuilder(GenotypeMetricsBuilder): - """Generate ``GenotypeMetrics`` for Delly VCF records""" - - def build_call_metrics(self, call): - result = GenotypeMetrics() - result.wt_split_reads = call.data["RR"] - result.alt_split_reads = call.data["RV"] - result.wt_paired_reads = call.data["DR"] - result.alt_paired_reads = call.data["DV"] - return result - - def get_length(self, record): - if record.INFO.get("SVTYPE") == "INS": - return record.INFO.get("INSLEN", 0) - if record.INFO.get("CHR2") and record.INFO["CHR2"] != record.CHROM: - return None - elif not record.INFO.get("END"): - return None - else: - return record.INFO.get("END") - record.POS + 1 - - def get_inner_region(self, record): - length = self.get_length(record) - if length is None: - return None - pos_begin = record.POS - 1 - pos_end = pos_begin + length - ci_pos = record.INFO.get("CIPOS") - ci_end = record.INFO.get("CIEND") - if abs(ci_pos[1]) + abs(ci_end[0]) >= length: - # confidence intervals overlap - pos_begin += int(0.1 * length) - pos_end -= int(0.1 * length) - if pos_end >= pos_begin: - return None - else: - return GenomeRegion(record.CHROM, pos_begin, pos_end) - else: - # confidence intervals don't overlap - pos_begin += ci_pos[1] - pos_end += ci_pos[0] # negative - return GenomeRegion(record.CHROM, pos_begin, pos_end) - - -class PopDelGenotypeMetricsBuilder(GenotypeMetricsBuilder): - """Generate ``GenotypeMetrics`` for PopDel VCF records""" - - def build_call_metrics(self, call): - result = GenotypeMetrics() - lad = call.data["LAD"] - result.wt_paired_reads = lad[0] - result.alt_paired_reads = lad[2] - return result - - def get_length(self, record): - return -int(record.INFO["SVLEN"]) - - def get_inner_region(self, record): - length = self.get_length(record) - pos_begin = record.POS - 1 - pos_end = pos_begin + length - return GenomeRegion(record.CHROM, pos_begin, pos_end) - - -class MantaGenotypeMetricsBuilder(GenotypeMetricsBuilder): - """Generate ``GenotypeMetrics`` for Manta VCF records""" - - def build_call_metrics(self, call): - result = GenotypeMetrics() - srs = call.data.get("SR", [0, 0]) - result.wt_split_reads = srs[0] - result.alt_split_reads = srs[1] - prs = call.data.get("PR", [0, 0]) - result.wt_paired_reads = prs[0] - result.alt_paired_reads = prs[1] - return result - - def get_length(self, record): - if record.INFO.get("SVTYPE") == "INS": - return record.INFO.get("SVLEN", [0])[0] - elif not record.INFO.get("END"): - return None - else: - return record.INFO.get("END") - record.POS + 1 - - def get_inner_region(self, record): - # TODO: dupe from Delly - length = self.get_length(record) - if length is None: - return None - pos_begin = record.POS - 1 - pos_end = pos_begin + length - ci_pos = record.INFO.get("CIPOS") or [0, 0] - ci_end = record.INFO.get("CIEND") or [0, 0] - if abs(ci_pos[1]) + abs(ci_end[0]) >= length: - # confidence intervals overlap - pos_begin += int(0.1 * length) - pos_end -= int(0.1 * length) - if pos_end >= pos_begin: - return None - else: - return GenomeRegion(record.CHROM, pos_begin, pos_end) - else: - # confidence intervals don't overlap - pos_begin += ci_pos[1] - pos_end += ci_end[0] # negative - return GenomeRegion(record.CHROM, pos_begin, pos_end) - - -class FilterStep: - """Base class for filter step.""" - - def __init__(self, owner, args, inner=None): - #: Owner App object - self.owner = owner - #: Command line arguments to pass in. 
- self.args = args - #: Inner filter to pass through, ``None`` if no inner filter. - self.inner = inner - #: Whether or not active by configuration. - self.active = False - #: Hook for implementing setup - self._setup() - #: Metric builder to use, built on first usage. - self._metric_builder = None - - @property - def metric_builder(self): - if not self._metric_builder: - if "RR" in self.owner.vcf_header.format_ids(): - self._metric_builder = DellyGenotypeMetricsBuilder() - elif "PR" in self.owner.vcf_header.format_ids(): - self._metric_builder = MantaGenotypeMetricsBuilder() - else: - self._metric_builder = PopDelGenotypeMetricsBuilder() - return self._metric_builder - - def apply(self, record): - if self.inner: - record = self.inner.apply(record) - return self._apply(record) - - def _setup(self): - """Perform extended setup.""" - - def _apply(self, record): - """Actual implementation.""" - raise NotImplementedError("Implement me!") - - -class AnnotateSizeClassFilterStep(FilterStep): - """Add ``INFO/SIZE_CLASS`` entry.""" - - def _setup(self): - self.active = True - - def _apply(self, record): - # Compute length and exit if no length given. - length = self.metric_builder.get_length(record) - if length is None: - return record - # Add INFO/SVLEN if not existing. - if "SVLEN" not in record.INFO: - record.INFO["SVLEN"] = [length] - # Compute length by thresholds. - if length <= self.args.small_sv_max_size: - record.INFO["SIZE_CLASS"] = "SMALL" - elif length <= self.args.medium_sv_max_size: - record.INFO["SIZE_CLASS"] = "MEDIUM" - else: - record.INFO["SIZE_CLASS"] = "LARGE" - return record - - -class HetSnvFilterStep(FilterStep): - """Filter by existence of het. SNVs in deletions.""" - - def _setup(self): - if self.args.small_var_vcf: - self.active = True - self.reader = vcfpy.Reader.from_path(self.args.small_var_vcf) - - def _apply(self, record): - # Handle cases of non-deletion or large deletions. - assert self.reader - if record.INFO.get("SIZE_CLASS") not in ("SMALL", "MEDIUM"): - return record # ignore, LARGE or non-linear - elif record.INFO.get("SVTYPE") != "DEL": - return record # ignore, not deletion - # Get region to process. - inner_region = self.metric_builder.get_inner_region(record) - if not inner_region: - return record # empty inner region, skip - # Check whether the deletion calls pass the het. SNV filter - counters = dict((a.name, 0) for a in self.owner.pedigree.affecteds) - for vcf_record in self.reader.fetch( - inner_region.chrom, inner_region.begin, inner_region.end - ): - if not vcf_record.is_snv(): - continue # only consider SNVs - for sample in self.owner.pedigree.affecteds: - if sample.name not in vcf_record.call_for_sample: - continue # skip, sample not in file - call = vcf_record.call_for_sample[sample.name] - if call.gt_type != 1: - continue # only consider HET calls - if (call.data.get("GQ", 0) or 0) <= self.args.snv_min_gq: - continue # ignore, GQ too bad - if (call.data.get("DP", 0) or 0) <= self.args.snv_min_dp: - continue # ignore, DP too low - # TODO: limit on alternative allele fraction? - counters[call.sample] += 1 # have het. 
SNV for sample - # Augment FORMAT and sample data - record.add_format("HET_SNVS", 0) - record.add_format("FT", ["PASS"]) - for sample, num in counters.items(): - if sample not in record.call_for_sample: - continue - call = record.call_for_sample[sample] - call.data["HET_SNVS"] = num - if num >= self.args.snv_min_count: - call = record.call_for_sample[sample] - if "FT" not in call.data: - call.data["FT"] = ["PASS"] - append_unique(call.data["FT"], "HET_SNVS") - return record - - -class AlternateAlleleSupportFilterStep(FilterStep): - def _setup(self): - self.active = True - self.count_mults = { - "DEL": self.args.sv_del_count_mult, - "DUP": self.args.sv_dup_count_mult, - "INV": self.args.sv_inv_count_mult, - "INS": self.args.sv_ins_count_mult, - "BND": self.args.sv_bnd_count_mult, - } - - def _apply(self, record): - # Precompute multiplicator to use - count_mult = self.count_mults.get(record.INFO["SVTYPE"], 1) - # Add defaults to FORMAT field if necessary - record.add_format("PE_COUNT", 0) - record.add_format("PE_AAF", 0) - record.add_format("SR_COUNT", 0) - record.add_format("SR_AAF", 0) - record.add_format("FT", ["PASS"]) - for call in record: - if "FT" not in call.data: - call.data["FT"] = ["PASS"] - metrics = self.metric_builder.build_call_metrics(call) - if metrics.pe_aaf() is not None: - call.data["PE_AAF"] = metrics.pe_aaf() - if metrics.alt_paired_reads is not None: - call.data["PE_COUNT"] = metrics.alt_paired_reads - if metrics.sr_aaf() is not None: - call.data["SR_AAF"] = metrics.sr_aaf() - if metrics.alt_split_reads is not None: - call.data["SR_COUNT"] = metrics.alt_split_reads - if call.sample in self.owner.pedigree.affecteds_names: - if ( - metrics.pe_aaf() is not None - and metrics.pe_aaf() < self.args.affected_min_pe_aaf - ): - append_unique(call.data["FT"], "MIN_PE_AAF") - if ( - metrics.sr_aaf() is not None - and metrics.sr_aaf() < self.args.affected_min_sr_aaf - ): - append_unique(call.data["FT"], "MIN_SR_AAF") - if ( - metrics.alt_paired_reads is not None - and metrics.alt_paired_reads < self.args.affected_min_pe_count * count_mult - ): - append_unique(call.data["FT"], "MIN_PE_COUNT") - if ( - metrics.alt_split_reads is not None - and metrics.alt_split_reads < self.args.affected_min_sr_count * count_mult - ): - append_unique(call.data["FT"], "MIN_SR_COUNT") - else: - if ( - metrics.pe_aaf() is not None - and metrics.pe_aaf() > self.args.unaffected_max_pe_aaf - ): - append_unique(call.data["FT"], "MAX_PE_AAF") - if ( - metrics.sr_aaf() is not None - and metrics.sr_aaf() > self.args.unaffected_max_sr_aaf - ): - append_unique(call.data["FT"], "MAX_SR_AAF") - if ( - metrics.alt_paired_reads is not None - and metrics.alt_paired_reads > self.args.unaffected_max_pe_count * count_mult - ): - append_unique(call.data["FT"], "MAX_PE_COUNT") - if ( - metrics.alt_split_reads is not None - and metrics.alt_split_reads > self.args.unaffected_max_sr_count * count_mult - ): - append_unique(call.data["FT"], "MAX_SR_COUNT") - return record - - -class InheritanceAnnoFilterStep(FilterStep): - def _setup(self): - self.active = True - - def _apply(self, record): - for member in self.owner.pedigree.members: - compatible = [] - if self._compatible_de_novo(record, member): - compatible.append("DE_NOVO") - if self._compatible_dominant(record, member): - compatible.append("DOMINANT") - if compatible: - record.add_format("INHERITANCE", []) - record.call_for_sample[member.name].data["INHERITANCE"] = compatible - return record - - def _compatible_de_novo(self, record, member): - if member.father == "0" 
or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - record.call_for_sample[member.father].is_variant, - record.call_for_sample[member.mother].is_variant, - } - return parent_variants == set([False]) - - def _compatible_dominant(self, record, member): - if member.father == "0" or member.mother == "0": - return False - if ( - member.father not in record.call_for_sample - or member.mother not in record.call_for_sample - ): - return False - parent_variants = { - record.call_for_sample[member.father].is_variant, - record.call_for_sample[member.mother].is_variant, - } - return len(parent_variants) == 2 - - -class AnnotateAluOverlapFilterStep(FilterStep): - """Add ``ALU_OVL`` filter to SVs overlapping with ALU over a Jaccard - threshold. - - Only ``SMALL`` and ``MEDIUM`` variants are considered in this step - for performance reasons. - """ - - def _setup(self): - if self.args.alu_bed: - self.active = True - self.tabix = tabix.open(self.args.alu_bed) - - def _apply(self, record): - if not self.tabix: - return record # no ALU database - if record.INFO.get("SIZE_CLASS") not in ("SMALL", "MEDIUM"): - return record # ignore, is LARGE or non-linear - if not record.INFO.get("SVTYPE") == "DEL": - return record # ignore, is not deletion - chrom = record.CHROM - pos_begin = record.POS - 1 - pos_end = pos_begin + self.metric_builder.get_length(record) - sv_region = GenomeRegion(chrom, pos_begin, pos_end) - try: - best_jaccard = -1 - for bed_record in self.tabix.query(chrom, pos_begin, pos_end): - record_region = GenomeRegion(bed_record[0], int(bed_record[1]), int(bed_record[2])) - if sv_region.jaccard(record_region) > best_jaccard: - best_jaccard = sv_region.jaccard(record_region) - if best_jaccard >= self.args.alu_jaccard_threshold: - record.add_filter("ALU_OVL") - if "ALU_OVL" not in record.FILTER: - record.add_filter("ALU_OVL") - if best_jaccard >= 0: - record.INFO["BEST_ALU_JACCARD"] = round(best_jaccard, 4) - except tabix.TabixError: - pass # swallow, probably unknown contig - return record - - -class AnnotateDatabaseOverlapFilterStep(FilterStep): - """Add DB_OVL filter to SVs overlapping with DB entry over a Jaccard - treshold - - Currently, this is only implemented for deletions. - - Also, this is only activated for small and medium deletions because of - performance. 
- """ - - def _setup(self): - if self.args.db_bed: - self.active = True - self.tabix = [tabix.open(p) for p in self.args.db_bed] - - def _apply(self, record): - if record.INFO.get("SIZE_CLASS") not in ("SMALL", "MEDIUM"): - return record # ignore, is LARGE or non-linear - if not record.INFO.get("SVTYPE") == "DEL": - return record # ignore, is not deletion - chrom = record.CHROM - pos_begin = record.POS - 1 - pos_end = pos_begin + self.metric_builder.get_length(record) - sv_region = GenomeRegion(chrom, pos_begin, pos_end) - best_jaccard = -1 - for tbx in self.tabix: - try: - for bed_record in tbx.query(chrom, pos_begin, pos_end): - record_region = GenomeRegion( - bed_record[0], int(bed_record[1]), int(bed_record[2]) - ) - if sv_region.jaccard(record_region) >= best_jaccard: - best_jaccard = sv_region.jaccard(record_region) - except tabix.TabixError: - pass # swallow, probably unknown contig - if best_jaccard >= self.args.db_jaccard_threshold: - if "DB_OVL" not in record.FILTER: - record.add_filter("DB_OVL") - if best_jaccard >= 0: - record.INFO["BEST_DB_JACCARD"] = round(best_jaccard, 4) - return record - - -class AnnotateCarrierCountsFilterStep(FilterStep): - def _setup(self): - self.active = True - # Create shortcuts of samples in pedigree and samples in affected. - self.in_pedigree = set() - self.affected = set() - for member in self.owner.pedigree.members: - self.in_pedigree.add(member.name) - if member.disease == PedigreeMember.AFFECTED: - self.affected.add(member.name) - - def _apply(self, record): - # Count carriers. - ped_carriers = 0 - affected_carriers = 0 - all_carriers = 0 - for call in record: - if not call.is_variant: - continue # skip - all_carriers += 1 - if call.sample in self.in_pedigree: - ped_carriers += 1 - if call.sample in self.affected: - affected_carriers += 1 - # Write out counts - record.INFO["AFFECTED_CARRIERS"] = affected_carriers - record.INFO["UNAFFECTED_CARRIERS"] = ped_carriers - affected_carriers - record.INFO["BACKGROUND_CARRIERS"] = all_carriers - ped_carriers - return record - - -#: The filters to apply. -FILTERS = ( - # The first step is always the annotation with the size class. - AnnotateSizeClassFilterStep, - # Filter based on het. SNV calls within the deletion. - HetSnvFilterStep, - # Filter based on alternate allele support. - AlternateAlleleSupportFilterStep, - # Add annotation for compatibility with inheritance. - InheritanceAnnoFilterStep, - # Overlaps with ALUs - AnnotateAluOverlapFilterStep, - # Overlaps with DBV gold standard database - AnnotateDatabaseOverlapFilterStep, - # Annotate with carrier counts. - AnnotateCarrierCountsFilterStep, -) - - -class VcfFilterApp: - """Container class for storing the application's state. - - We are using a class for the implementation in contrast to global - functions because this gives us a way to store the arguments etc. - implicitely without passing it as parameters. - """ - - def __init__(self, args, filters=FILTERS): - #: Command line arguments. - self.args = args - #: Pedigree to use. - self.pedigree = self._load_pedigree() - # Setup the logging. - self._setup_logging() - #: Filter steps to run. - self.filters = filters - #: Setup the filter chain. - self.filter_chain = self._build_filter_chain() - #: The VCF header, set after construction. 
- self.vcf_header = None - - def _load_pedigree(self): - logging.info("Loading pedigree file %s", self.args.ped_file) - with open(self.args.ped_file, "rt") as pedf: - return Pedigree.parse(pedf) - - def _setup_logging(self): - logging.basicConfig( - format="%(asctime)s %(name)-12s %(levelname)-8s %(message)s", datefmt="%m-%d %H:%M" - ) - logger = logging.getLogger("") - if self.args.verbose: - logger.setLevel(logging.DEBUG) - else: - logger.setLevel(logging.INFO) - - def _build_filter_chain(self): - """Build the filter chain.""" - result = None - for klass in self.filters: - tmp = klass(self, self.args, result) - logging.info("%s %s", klass, tmp.active) - if tmp.active: - result = tmp - return result or (lambda x: x) - - def _print_header(self): - logging.info("SV VCF filter") - logging.info("Arguments: %s", self.args) - - def _process_region(self, region, writer): - """Process a single region and write its result to the writer.""" - - def _augment_header(self, header): - """Augment header information""" - header = self._augment_filter(header) - header = self._augment_info(header) - header = self._augment_format(header) - return header - - def _augment_filter(self, header): - """Augment header for FILTER column""" - # Record-wise FILTER entries - # - # VCF header FILTER entry for ALU overlap - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "ALU_OVL"), - ( - "Description", - "Jaccard index with an ALU element >= {}".format( - self.args.alu_jaccard_threshold - ), - ), - ] - ) - ) - # VCF header FILTER entry for overlap with database - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "DB_OVL"), - ( - "Description", - "Jaccard index with an database element >= {}".format( - self.args.db_jaccard_threshold - ), - ), - ] - ) - ) - # Genotype-wise FILTER entries - # - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "HET_SNVS"), - ( - "Description", - "Overlaps with at least {} heterozygous SNVs".format( - self.args.snv_min_count - ), - ), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MIN_PE_COUNT"), - ("Description", "Does not pass minimal paired read support in affected"), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MIN_PE_AAF"), - ("Description", "Does not pass minimal paired read support in affected"), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MIN_SR_COUNT"), - ("Description", "Does not pass minimal split read support in affected"), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MIN_SR_AAF"), - ("Description", "Does not pass minimal split read support in affected"), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MAX_PE_COUNT"), - ( - "Description", - "Does not pass maximal paired read support in unaffected/background", - ), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MAX_PE_AAF"), - ( - "Description", - "Does not pass maximal paired read support in unaffected/background", - ), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MAX_SR_COUNT"), - ( - "Description", - "Does not pass maximal split read support in unaffected/background", - ), - ] - ) - ) - header.add_filter_line( - vcfpy.OrderedDict( - [ - ("ID", "MAX_SR_AAF"), - ( - "Description", - "Does not pass maximal split read support in unaffected/background", - ), - ] - ) - ) - return header - - def _augment_info(self, header): - """Augment header for INFO column""" - # VCF header SVLEN giving SV size. 
- header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "SVLEN"), - ("Number", "."), - ("Type", "Integer"), - ("Description", "Difference in length between REF and ALT alleles"), - ] - ) - ) - # VCF header SIZE_CLASS entry for describing SV size - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "SIZE_CLASS"), - ("Number", 1), - ("Type", "String"), - ( - "Description", - ("SV size class, one of SMALL (<= {}), MEDIUM (<= {}), LARGE").format( - self.args.small_sv_max_size, self.args.medium_sv_max_size - ), - ), - ] - ) - ) - # Highest Jaccard value for DB overlap - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "BEST_DB_JACCARD"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Best Jaccard value for DB overlap"), - ] - ) - ) - # Highest Jaccard value for ALU overlap - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "BEST_ALU_JACCARD"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Best Jaccard value for ALU overlap"), - ] - ) - ) - # Information on carriers - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "AFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of affected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "UNAFFECTED_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of unaffected samples from pedigree that are carriers"), - ] - ) - ) - header.add_info_line( - vcfpy.OrderedDict( - [ - ("ID", "BACKGROUND_CARRIERS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of background samples that are carriers"), - ] - ) - ) - return header - - def _augment_format(self, header): - """Augment header for FORMAT column""" - header = vcfpy.header_without_lines(header, (("FORMAT", "FT"),)) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "FT"), - ("Number", "1"), - ("Type", "String"), - ("Description", "Semicolon-separated list of filters"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "INHERITANCE"), - ("Number", "."), - ("Type", "String"), - ("Description", "List of compatible inheritance modes (DE_NOVO, DOMINANT)"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "HET_SNVS"), - ("Number", "1"), - ("Type", "Integer"), - ("Description", "Number of overlapping heterozygous SNVs"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "PE_AAF"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Paired-end support of variant as alternate allele fraction"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "PE_COUNT"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Paired-end support of variant as read pair count"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "SR_AAF"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Split-end support of variant as alternate allele fraction"), - ] - ) - ) - header.add_format_line( - vcfpy.OrderedDict( - [ - ("ID", "SR_COUNT"), - ("Number", 1), - ("Type", "Float"), - ("Description", "Split-end support of variant as read pair count"), - ] - ) - ) - return header - - def run(self): - self._print_header() - with vcfpy.Reader.from_path(self.args.input_vcf) as reader: - # If no regions are given, fall back to all chromosomes. - regions = self.args.regions or full_chromosomes(reader) - # Extend header with new lines. - header = self._augment_header(reader.header) - # Store header in ``self.vcf_header``. 
- self.vcf_header = header - # Open the VCF writer for writing and process each region. - with vcfpy.Writer.from_path(self.args.output_vcf, header) as writer: - for region in regions: - logging.info("Processing %s", region) - try: - records = reader.fetch(region) - except ValueError: - records = [] - logging.warning("Could not fetch records for %s", region) - for record in records: - record = self.filter_chain.apply(record) - writer.write_record(record) - - -def main(argv=None): - parser = argparse.ArgumentParser(description="(Delly) SV VCF soft-filter application tool") - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("General Options") - group.add_argument("-v", "--verbose", default=0, action="count") - - group = parser.add_argument_group("Input / Output Options") - group.add_argument("--input-vcf", required=True, help="input VCF file") - group.add_argument("--output-vcf", help="output VCF file", default="/dev/stdout") - group.add_argument("--ped-file", required=True, help="Path to PED file to use.") - group.add_argument( - "--region", - type=str, - required=False, - default=[], - action="append", - dest="regions", - nargs="+", - help=("region(s) to limit analysis to"), - ) - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("SR/PR Support Configuration") - group.add_argument( - "--unaffected-max-pe-count", - type=int, - default=4, - help=("Maximal support for unaffected samples; default is 4"), - ) - group.add_argument( - "--unaffected-max-pe-aaf", - type=float, - default=0.2, - help=("Maximal support for unaffected samples; default is 0.2"), - ) - group.add_argument( - "--unaffected-max-sr-count", - type=int, - default=4, - help=("Maximal support for unaffected samples; default is 4"), - ) - group.add_argument( - "--unaffected-max-sr-aaf", - type=float, - default=0.2, - help=("Maximal support for unaffected samples; default is 0.2"), - ) - group.add_argument( - "--affected-min-pe-count", - type=int, - default=4, - help=("Minimal support for affected samples; default is 4"), - ) - group.add_argument( - "--affected-min-pe-aaf", - type=float, - default=0.2, - help=("Minimal support for affected samples; default is 0.2"), - ) - group.add_argument( - "--affected-min-sr-count", - type=int, - default=4, - help=("Minimal support for affected samples; default is 4"), - ) - group.add_argument( - "--affected-min-sr-aaf", - type=float, - default=0.2, - help=("Minimal support for affected samples; default is 0.2"), - ) - - group = parser.add_argument_group("Multipliers for PE/SR counts") - group.add_argument( - "--sv-del-count-mult", type=int, default=1, help="Modifying multiplicator for counting" - ) - group.add_argument( - "--sv-dup-count-mult", type=int, default=2, help="Modifying multiplicator for counting" - ) - group.add_argument( - "--sv-inv-count-mult", type=int, default=2, help="Modifying multiplicator for counting" - ) - group.add_argument( - "--sv-ins-count-mult", type=int, default=1, help="Modifying multiplicator for counting" - ) - group.add_argument( - "--sv-bnd-count-mult", type=int, default=2, help="Modifying multiplicator for counting" - ) - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("SV Size Configuration") - group.add_argument( - "--small-sv-max-size", - type=int, - default=400, - help='Maximal SV length to be classified as "SMALL"', - ) - group.add_argument( - "--medium-sv-max-size", - 
type=int, - default=50000, - help='Maximal SV length to be classified as "MEDIUM"', - ) - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("ALU / Variant Database Overlap") - group.add_argument( - "--alu-jaccard-threshold", - type=float, - default=0.7, - help=("Jaccard index threshold for flagging as ALU overlap; default: 0.7"), - ) - group.add_argument("--alu-bed", type=str, help="Path to ALU BED file") - group.add_argument( - "--db-jaccard-threshold", - type=float, - default=0.7, - help=("Jaccard index threshold for flagging as DB overlap; default: 0.7"), - ) - group.add_argument( - "--db-bed", - type=str, - default=[], - action="append", - help="Path to BED database with known variants", - ) - - # ----------------------------------------------------------------------- - group = parser.add_argument_group("Het. SNV filter") - group.add_argument("--small-var-vcf", type=str, help="Path to small variant VCF file") - group.add_argument( - "--snv-min-count", - type=int, - default=2, - help="Smallest number of SNVs to use for subtraction", - ) - group.add_argument( - "--snv-min-dp", type=int, default=10, help="Small variant minimal DP for subtraction" - ) - group.add_argument( - "--snv-min-gq", type=int, default=60, help="Small variant minimal GQ for subtraction" - ) - - args = parser.parse_args(argv) - args.regions = [r for lst in args.regions for r in lst] - return VcfFilterApp(args).run() - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/snappy_wrappers/wrappers/vcf_sv_filter/wrapper.py b/snappy_wrappers/wrappers/vcf_sv_filter/wrapper.py deleted file mode 100644 index e73e683f5..000000000 --- a/snappy_wrappers/wrappers/vcf_sv_filter/wrapper.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for vcf_sv_filter - -- Remove LowQual variants -- Add various annotations -""" - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../wgs_sv_filtration/funcs.sh - -# Run through VCF SV filter and limit to samples in family. 
- -set +e -samples=$( - samples_vcf_ped {snakemake.input.sv_bcf} {snakemake.input.ped} \ - | tr '\n' ',' \ - | sed -e 's/,$//' -) -set -e - -if [[ {snakemake.wildcards.caller} == delly2 ]]; then - bcftools view --threads 4 --force-samples -s $samples -e 'FILTER == "LowQual"' {snakemake.input.sv_bcf} -else - bcftools view --threads 4 --force-samples -s $samples {snakemake.input.sv_bcf} -fi \ -| bcftools query -f "%ID\n" -i 'GT ~ "1"' \ -> $TMPDIR/ids.txt - -( \ - bcftools view -h {snakemake.input.sv_bcf}; - bcftools view --threads 4 -H {snakemake.input.sv_bcf} \ - | grep -w -F -f $TMPDIR/ids.txt \ -) \ -| bgzip --threads 4 -c \ -> $TMPDIR/tmp.vcf.gz - -tabix -f $TMPDIR/tmp.vcf.gz - -# Run through VCF SV filter - -time python3 {base_dir}/vcf_sv_filter.py \ - --ped-file {snakemake.input.ped} \ - --input-vcf $TMPDIR/tmp.vcf.gz \ - --small-var-vcf {snakemake.input.var_vcf} \ - $(if [[ -n "{snakemake.config[step_config][wgs_sv_annotation][path_alu_bed]}" ]]; then echo --alu-bed {snakemake.config[step_config][wgs_sv_annotation][path_alu_bed]}; fi) \ - $(if [[ -n "{snakemake.config[step_config][wgs_sv_annotation][path_db_bed]}" ]]; then echo --db-bed {snakemake.config[step_config][wgs_sv_annotation][path_db_bed]}; fi) \ -| bgzip -c \ -> $TMPDIR/tmp2.vcf.gz - -bcftools view \ - --threads 4 \ - --force-samples \ - -s $samples \ - $TMPDIR/tmp2.vcf.gz \ -| bcftools view \ - -i 'GT ~ "1"' \ - -O z \ - -o {snakemake.output.vcf} - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/vcfpy/add_bed/wrapper.py b/snappy_wrappers/wrappers/vcfpy/add_bed/wrapper.py index 466ea97d8..627a4e899 100644 --- a/snappy_wrappers/wrappers/vcfpy/add_bed/wrapper.py +++ b/snappy_wrappers/wrappers/vcfpy/add_bed/wrapper.py @@ -8,8 +8,7 @@ import tabix import vcfpy -step = snakemake.config["pipeline_step"]["name"] -config = snakemake.config["step_config"][step] +config = getattr(snakemake.params, "args", {}) tempdir = tempfile.mkdtemp() diff --git a/snappy_wrappers/wrappers/vep/post_filter/wrapper.py b/snappy_wrappers/wrappers/vep/post_filter/wrapper.py deleted file mode 100644 index f3b1bcb7c..000000000 --- a/snappy_wrappers/wrappers/vep/post_filter/wrapper.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- - -import re - -from snakemake import shell -import vcfpy - -inp = snakemake.input.vcf -out = snakemake.output.vcf -escape = snakemake.config["step_config"][snakemake.config["pileline_step"]["name"]]["vep"]["escape"] - -# Nomenclature taken from ENSEMBL 110 (http://www.ensembl.org/info/genome/variation/prediction/predicted_data.html) -variant_classes_vep = ( - "transcript_ablation", - "splice_acceptor_variant", - "splice_donor_variant", - "stop_gained", - "frameshift_variant", - "stop_lost", - "start_lost", - "transcript_amplification", - "feature_elongation", - "feature_truncation", - "__end_of_HIGH_IMPACT", - "inframe_insertion", - "inframe_deletion", - "missense_variant", - "protein_altering_variant", - "__end_of_MODERATE_IMPACT", - "splice_donor_5th_base_variant", - "splice_region_variant", - "splice_donor_region_variant", - "splice_polypyrimidine_tract_variant", - "incomplete_terminal_codon_variant", - "start_retained_variant", - "stop_retained_variant", - "synonymous_variant", - "__end_of_LOW_IMPACT", - "coding_sequence_variant", - "mature_miRNA_variant", - 
"5_prime_UTR_variant", - "3_prime_UTR_variant", - "non_coding_transcript_exon_variant", - "intron_variant", - "NMD_transcript_variant", - "non_coding_transcript_variant", - "coding_transcript_variant", - "upstream_gene_variant", - "downstream_gene_variant", - "TFBS_ablation", - "TFBS_amplification", - "TF_binding_site_variant", - "regulatory_region_ablation", - "regulatory_region_amplification", - "regulatory_region_variant", - "intergenic_variant", - "sequence_variant", - "__end_of_MODIFIER_IMPACT", - "", -) - -appris_codes = ("P1", "A1", "P2", "A2", "P3", "P4", "P5", "") -tsl_codes = ("1", "2", "3", "4", "5", "NA", "") -criteria = ("MANE", "Consequence", "APPRIS", "TSL") - -reduced_escape_mapping = [ - (";", "%3B"), - ("\r", "%0D"), - ("\n", "%0A"), - ("\t", "%09"), -] - -sep = re.compile("\|") -prefix = re.compile("^Consequence annotations from Ensembl VEP. Format: ") -ampersand = re.compile("&") - - -def get_value(criterion, x): - if criterion == "MANE": - return _get_value_MANE(x) - if criterion == "Consequence": - return _get_value_Consequence(x) - if criterion == "APPRIS": - return _get_value_APPRIS(x) - if criterion == "TSL": - return _get_value_TSL(x) - - -def _get_value_MANE(x): - if x: - return 0 - else: - return 1 - - -def _get_value_Consequence(x): - consequences = ampersand.split(x) - worst = None - for consequence in consequences: - try: - i = variant_classes_vep.index(consequence) - if worst is None or i < worst: - worst = i - except ValueError: - print("WARNING- unknown consequence {}".format(consequence)) - if worst is None: - return len(variant_classes_vep) - 1 - else: - return worst - - -def _get_value_APPRIS(x): - try: - return appris_codes.index(x) - except ValueError: - print("WARNING- unknown APPRIS code {}".format(x)) - return len(appris_codes) - 1 - - -def _get_value_TSL(x): - try: - return tsl_codes.index(str(x)) - except ValueError: - print("WARNING- unknown TSL code {}".format(x)) - return len(tsl_codes) - 1 - - -reader = vcfpy.Reader.from_path(inp) -header = reader.header.copy() - -if ( - not escape - and header.lines[0].key == "fileformat" - and header.lines[0].value in ("VCFv4.1", "VCFv4.2") -): - vcfpy.record.ESCAPE_MAPPING = reduced_escape_mapping - -csq = header.get_info_field_info("CSQ") -titles = sep.split(prefix.sub("", header.get_info_field_info("CSQ").description)) -csq.mapping["Number"] = 1 - -writer = vcfpy.Writer.from_path(out, header) - -for record in reader: - codes = { - "MANE": 1, - "Consequence": len(variant_classes_vep) - 1, - "APPRIS": len(appris_codes) - 1, - "TSL": len(tsl_codes) - 1, - } - chosen = None - first = None - for oneAnnotation in record.INFO["CSQ"]: - fields = sep.split(str(oneAnnotation)) - assert len(fields) == len(titles) - fields = {titles[i]: fields[i] for i in range(len(fields))} - - if not first: - first = fields - - selected = False - for criterion in criteria: - result = get_value(criterion, fields[criterion]) - if result is None or result == codes[criterion]: - continue - selected = result < codes[criterion] - break - - if selected: - for criterion in criteria: - codes[criterion] = get_value(criterion, fields[criterion]) - chosen = fields.values() - - assert first is not None - if chosen is None: - chosen = first.values() - - record.INFO["CSQ"] = "|".join(chosen) - - writer.write_record(record) - -writer.close() - -shell( - r""" -tabix {snakemake.output.vcf} - -d=$(dirname {snakemake.output.vcf}) -pushd -f=$(basename {snakemake.output.vcf}) -md5sum $f > $f.md5 -md5sum $f.tbi > $f.tbi.md5 -popd -""" -) diff --git 
a/snappy_wrappers/wrappers/vep/run/wrapper.py b/snappy_wrappers/wrappers/vep/run/wrapper.py index 84da509d6..de6ade9e3 100644 --- a/snappy_wrappers/wrappers/vep/run/wrapper.py +++ b/snappy_wrappers/wrappers/vep/run/wrapper.py @@ -6,9 +6,10 @@ __author__ = "Eric Blanc" __email__ = "eric.blanc@bih-charite.de" +args = getattr(snakemake.params, "args", {}) +vep_config = args["config"] + # Get shortcuts to step configuration -current_step = snakemake.config["pipeline_step"]["name"] -vep_config = snakemake.config["step_config"][current_step]["vep"] pick_order = ",".join(vep_config["pick_order"]) script_output_options = " ".join(["--" + x for x in vep_config["output_options"]]) @@ -44,7 +45,7 @@ fi) \ {script_output_options} \ --{vep_config[tx_flag]} \ - --fasta {snakemake.config[static_data_config][reference][path]} \ + --fasta {args[reference]} \ --input_file {snakemake.input.vcf} --format vcf \ --output_file {full} --vcf --compress_output bgzip tabix {full} @@ -65,7 +66,7 @@ {script_output_options} \ --pick --pick_order {pick_order} \ --{vep_config[tx_flag]} \ - --fasta {snakemake.config[static_data_config][reference][path]} \ + --fasta {args[reference]} \ --input_file {snakemake.input.vcf} --format vcf \ --output_file {snakemake.output.vcf} --vcf --compress_output bgzip tabix {snakemake.output.vcf} diff --git a/snappy_wrappers/wrappers/vep/wrapper.py b/snappy_wrappers/wrappers/vep/wrapper.py index bcd34f7fb..c273f2747 100644 --- a/snappy_wrappers/wrappers/vep/wrapper.py +++ b/snappy_wrappers/wrappers/vep/wrapper.py @@ -2,6 +2,9 @@ __author__ = "Manuel Holtgrewe " +args = getattr(snakemake.params, "args", {}) +config = args.get("config", {}) + DEF_HELPER_FUNCS = r""" compute-md5() { @@ -53,7 +56,7 @@ # Run actual tools -------------------------------------------------------------------------------- vep --verbose \ - --fasta {snakemake.config[static_data_config][reference][path]} \ + --fasta {args[reference]} \ --input_file {snakemake.input.vcf} \ --output_file {snakemake.output.vcf} \ --compress_output bgzip \ @@ -63,16 +66,16 @@ --hgvs \ --cache \ --offline \ - --{snakemake.config[step_config][variant_annotation][vep][tx_flag]} \ + --{config[tx_flag]} \ --force_overwrite \ - --buffer_size {snakemake.config[step_config][variant_annotation][vep][buffer_size]} \ - $(if [[ ! -z "{snakemake.config[step_config][variant_annotation][vep][cache_dir]}" ]]; then \ - echo --dir_cache {snakemake.config[step_config][variant_annotation][vep][cache_dir]}; \ + --buffer_size {config[buffer_size]} \ + $(if [[ ! 
-z "{config[cache_dir]}" ]]; then \ + echo --dir_cache {config[cache_dir]}; \ fi) \ - --cache_version {snakemake.config[step_config][variant_annotation][vep][cache_version]} \ - --assembly {snakemake.config[step_config][variant_annotation][vep][assembly]} \ - --fork {snakemake.config[step_config][variant_annotation][vep][num_threads]} \ - {snakemake.config[step_config][variant_annotation][vep][more_flags]} + --cache_version {config[cache_version]} \ + --assembly {config[assembly]} \ + --fork {config[num_threads]} \ + {config[more_flags]} tabix -f {snakemake.output.vcf} compute-md5 {snakemake.output.vcf} {snakemake.output.vcf_md5} diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/environment.yaml b/snappy_wrappers/wrappers/wgs_cnv_filtration/environment.yaml deleted file mode 100644 index f7d919074..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bcftools==1.8 - - htslib==1.8 - - bedtools==2.27.0 diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/environment.yaml b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/environment.yaml deleted file mode 100644 index f7d919074..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bcftools==1.8 - - htslib==1.8 - - bedtools==2.27.0 diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/wrapper.py b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/wrapper.py deleted file mode 100644 index 7af2a013b..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_inheritance/wrapper.py +++ /dev/null @@ -1,191 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for filtering WGS SV results for mode of inheritance.""" - -# TODO: works for trios, singletons but NOT FOR MORE COMPLICATED CASES -# TODO: how to make work if only one parent present? - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../funcs.sh - -# Get name and number of index, father, and mother ------------------------------------------------ - -index={snakemake.wildcards.index_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Definition of the filtration functions ---------------------------------------------------------- - -# Dominant/de novo/recessive hom can be solved with a simple filter expression. -simple_filter() {{ - bcftools view \ - -i "$1" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -}} - -# Recessive het. comp. in the case of only one parent -rec_hc_one_parent() {{ - # Number of parents present in pedigree - parents=0 - [[ ! -z "$1" ]] && parents+=1 - [[ ! -z "$2" ]] && parents+=1 - - # One or both parents missing, give up on being smart. 
- bcftools view \ - -i "(UNAFFECTED_CARRIERS == $parents) && (BACKGROUND_CARRIERS == 0)" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -}} - -# Perform filtration for ARHC variants (2SVs affecting the same tad, one from father and one -# from mother) -rec_hc_two_parents() {{ - index_no=$1 - father_no=$2 - mother_no=$3 - - tads_bed={snakemake.config[step_config][wgs_cnv_filtration][region_beds][all_tads]} - - # Create mother SV file - exp_mother="(GT[$index_no] == \"alt\")" - exp_mother+=" && (UNAFFECTED_CARRIERS == 1)" - exp_mother+=" && (BACKGROUND_CARRIERS == 0)" - exp_mother+=" && (GT[$mother_no] == \"alt\")" - - bcftools view \ - -i "$exp_mother" \ - -O z \ - -o $TMPDIR/mother_SVs.vcf.gz \ - {snakemake.input.vcf} - - # Create father SV file - exp_father="(GT[$index_no] == \"alt\")" - exp_father+=" && (UNAFFECTED_CARRIERS == 1)" - exp_father+=" && (BACKGROUND_CARRIERS == 0)" - exp_father+=" && (GT[$father_no] == \"alt\")" - - bcftools view \ - -i "$exp_father" \ - -O z \ - -o $TMPDIR/father_SVs.vcf.gz \ - {snakemake.input.vcf} - - # Perform interval intersections to derive final AR het.comp. set of SVs. - bedtools intersect -wa -a $tads_bed -b $TMPDIR/mother_SVs.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/mother_SVs.tads.bed - - bedtools intersect -wa -a $tads_bed -b $TMPDIR/father_SVs.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/father_SVs.tads.bed - - sort $TMPDIR/mother_SVs.tads.bed \ - > $TMPDIR/mother_SVs.tads.sorted.bed - - sort $TMPDIR/father_SVs.tads.bed \ - > $TMPDIR/father_SVs.tads.sorted.bed - - comm -12 $TMPDIR/mother_SVs.tads.sorted.bed $TMPDIR/father_SVs.tads.sorted.bed \ - > $TMPDIR/commom_tads.bed - - sort -k1,1 -k2,2n $TMPDIR/commom_tads.bed \ - > $TMPDIR/commom_tads.sorted.bed - - bedtools intersect -header -wa -a $TMPDIR/mother_SVs.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.frommother.vcf - - bedtools intersect -header -wa -a $TMPDIR/father_SVs.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.fromfather.vcf - - bgzip $TMPDIR/ARHC.frommother.vcf - bgzip $TMPDIR/ARHC.fromfather.vcf - - tabix -f $TMPDIR/ARHC.frommother.vcf.gz - tabix -f $TMPDIR/ARHC.fromfather.vcf.gz - - bcftools concat -a -O z -o {snakemake.output.vcf} \ - $TMPDIR/ARHC.frommother.vcf.gz $TMPDIR/ARHC.fromfather.vcf.gz -}} - -# Actual filtration ------------------------------------------------------------------------------- - -## TODO: think whether to generate two files (father / mother) adapt the snakemake.wilcards.inheritance somewhere -## or one and filter for father/mother at the ARHC step - -# One key assumption here is that the variant should be exclusive to the family. - -case "{snakemake.wildcards.inheritance}" in - dominant) - # Perform filtration for variants dominantly transmitted by father or mother - filter="(BACKGROUND_CARRIERS == 0) && (GT[$index_no] == \"het\")" - if [[ -z "$father_no" ]] && [[ ! -z "$mother_no" ]]; then - filter+=" && (GT[$mother_no] == \"alt\")" - elif [[ ! -z "$father_no" ]] && [[ -z "$mother_no" ]]; then - filter+=" && (GT[$father_no] == \"alt\")" - elif [[ ! -z "$father_no" ]] && [[ ! 
-z "$mother_no" ]]; then - filter+=" && (((GT[$father_no] == \"alt\") && (GT[$mother_no] != \"alt\"))" - filter+=" || ((GT[$father_no] != \"alt\") && (GT[$mother_no] == \"alt\")))" - fi - - simple_filter "$filter" - ;; - de_novo) - # Perform filtration for de novo variants - simple_filter "(GT[$index_no] == \"alt\") && (UNAFFECTED_CARRIERS == 0) && (BACKGROUND_CARRIERS == 0)" - ;; - recessive_hom) - # Perform filtration for homozygous variant present in both parents - filter="(BACKGROUND_CARRIERS == 0) && (GT[$index_no] == \"alt\")" - [[ ! -z "$father_no" ]] && filter+=" && (GT[$father_no] == \"alt\")" - [[ ! -z "$mother_no" ]] && filter+=" && (GT[$mother_no] == \"alt\")" - simple_filter "$filter" - ;; - recessive_hc) - if [[ -z "$father_no" ]] || [[ -z "$mother_no" ]]; then - rec_hc_one_parent "$index_no" "$father_no" "$mother_no" - else - rec_hc_two_parents "$index_no" "$father_no" "$mother_no" - fi - ;; - *) # else, "all" - cp {snakemake.input.vcf} {snakemake.output.vcf} -esac - -tabix -f {snakemake.output.vcf} - -# Compute checksums ------------------------------------------------------------------------------- - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/environment.yaml b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/environment.yaml deleted file mode 100644 index f7d919074..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bcftools==1.8 - - htslib==1.8 - - bedtools==2.27.0 diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/wrapper.py b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/wrapper.py deleted file mode 100644 index f6f25e81a..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_quality/wrapper.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for threshold-based filtration of WGS SV calls.""" - -# TODO: works for trios, singletons, or if only one parent available but NOT FOR MORE COMPLICATED CASES -# TODO: currently only works for ERDS+SV2 - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -thresholds = snakemake.config["step_config"]["wgs_cnv_filtration"]["thresholds"][ - snakemake.wildcards.thresholds -] - -shell( - r""" -set -x - -# Load library with helper functions. -source {base_dir}/../funcs.sh - -# Get name and number of index, father, and mother ------------------------------------------------ - -index={snakemake.wildcards.index_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Actual filtration ------------------------------------------------------------------------------- - -case "{snakemake.wildcards.thresholds}" in - no_filter) - # Shortcut when we don't filter. 
- cp {snakemake.input.vcf} {snakemake.output.vcf} - cp {snakemake.input.vcf}.tbi {snakemake.output.vcf}.tbi - ;; - *) - # Build base filter expression for the index. - case "{thresholds[index_pass]}{thresholds[index_pass_de_novo]}" in - FalseFalse) - exp="(1 == 1)" - ;; - FalseTrue) - exp="(FORMAT/DFT[${{index_no}}:*] == \"PASS\")" - ;; - TrueFalse) - exp="(FORMAT/FT[${{index_no}}:*] == \"PASS\")" - ;; - TrueTrue) - exp="((FORMAT/FT[${{index_no}}:*] == \"PASS\")" - exp+=" && (FORMAT/DFT[${{index_no}}:*] == \"PASS\"))" - ;; - esac - # Extend filter expression if parents are present. - if [[ "{thresholds[parent_pass]}" == "True" ]] && [[ ! -z "$mother_no" ]]; then - exp+=" && (FORMAT/FT[${{mother_no}}:*] == \"PASS\")" - fi - if [[ "{thresholds[parent_pass]}" == "True" ]] && [[ ! -z "$father_no" ]]; then - exp+=" && (FORMAT/FT[${{father_no}}:*] == \"PASS\")" - fi - - # Perform filtration on the FT/DFT using the expression built above. - bcftools view \ - -i "$exp" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} - - tabix -f {snakemake.output.vcf} - ;; -esac - -# Compute checksums ------------------------------------------------------------------------------- - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/environment.yaml b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/environment.yaml deleted file mode 100644 index f7d919074..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - bioconda - - nodefaults -dependencies: - - bcftools==1.8 - - htslib==1.8 - - bedtools==2.27.0 diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/wrapper.py b/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/wrapper.py deleted file mode 100644 index 7482171ae..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/filter_regions/wrapper.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for filtering WGS SV results for overlap with interesting regions.""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -if snakemake.wildcards.regions == "whole_genome": - # We will not use path_bed below but need a value anyway. 
- path_bed = "/dev/null" -else: - path_bed = snakemake.config["step_config"]["wgs_cnv_filtration"]["region_beds"][ - snakemake.wildcards.regions - ] - -shell( - r""" -set -x - -case "{snakemake.wildcards.regions}" in - whole_genome) - cp {snakemake.input.vcf} {snakemake.output.vcf} - cp {snakemake.input.vcf}.tbi {snakemake.output.vcf}.tbi - ;; - *) - bedtools intersect -header -wa -a {snakemake.input.vcf} -b {path_bed} \ - | sort -k1,1V -k2,2n \ - | uniq \ - | bgzip -c \ - > {snakemake.output.vcf} - tabix -f {snakemake.output.vcf} - ;; -esac - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_cnv_filtration/funcs.sh b/snappy_wrappers/wrappers/wgs_cnv_filtration/funcs.sh deleted file mode 100644 index 2ad825b22..000000000 --- a/snappy_wrappers/wrappers/wgs_cnv_filtration/funcs.sh +++ /dev/null @@ -1,54 +0,0 @@ -# Helper bash functions. - -# Function get_index() -------------------------------------------------------- -# -# numeric_index get_index(vcf_path, sample_name) -# -# Get (and print to stdout) the numeric index of $sample_name in the VCF file -# at $vcf_path. - -get_index() -{ - [[ "$#" -ne 2 ]] && return - - vcf=$1 - name=$2 - - pat=$( - bcftools view --header-only $vcf \ - | tail -n 1 \ - | cut -f 10- \ - | tr '\t' '|') - - set +o pipefail - bcftools view --header-only $vcf \ - | grep '^#CHROM' \ - | tr '\t' '\n' \ - | cat -n \ - | grep "\s$name$" \ - | egrep -w "$pat" \ - | awk '{ print $1 - 10; }' -} - -# Function samples_vcf_ped() -------------------------------------------------- -# -# samples samples_vcf_ped(vcf_path, ped_path) -# -# Get sample names (line by line) that are both in the PED and the VCF file. - -samples_vcf_ped() -{ - [[ "$#" -ne 2 ]] && return - - vcf=$1 - ped=$2 - - pat=$( - bcftools view --header-only $vcf \ - | tail -n 1 \ - | cut -f 10- \ - | tr '\t' '|') - - cut -f 2 $ped \ - | egrep -w "$pat" -} diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/environment.yaml b/snappy_wrappers/wrappers/wgs_mei_filtration/environment.yaml deleted file mode 120000 index f013a931d..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../wgs_sv_filtration/environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/environment.yaml b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/wrapper.py b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/wrapper.py deleted file mode 100644 index 902b0641f..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_inheritance/wrapper.py +++ /dev/null @@ -1,159 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for inheritance filter for wgs_mei_filtration.""" - -# TODO: works for trios, singletons but NOT FOR MORE COMPLICATED CASES -# TODO: how to make work if only one parent present? 
-# TODO: consolidate with same filter for SV - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../../wgs_sv_filtration/funcs.sh - -# Get name and number of index, father, and mother. -index={snakemake.wildcards.index_library} -father=$(awk '($2 == "R13_1-N1-DNA1-WGS1") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "R13_1-N1-DNA1-WGS1") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Perform the actual filtration -tads_bed={snakemake.config[step_config][wgs_mei_filtration][region_beds][all_tads]} - -## TODO: think whether to generate two files (father / mother) adapt the snakemake.wilcards.inheritance somewhere -## or one and filter for father/mother at the ARHC step - -if [[ "{snakemake.wildcards.inheritance}" == dominant ]]; then - # Perform filtration for variants dominantly transmitted by father or mother - bcftools view \ - -i '(UNAFFECTED_CARRIERS == 1) && (BACKGROUND_CARRIERS == 0)' \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -elif [[ "{snakemake.wildcards.inheritance}" == de_novo ]]; then - # Perform filtration for de novo variants - bcftools view \ - -i '(UNAFFECTED_CARRIERS == 0) && (BACKGROUND_CARRIERS == 0)' \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -elif [[ "{snakemake.wildcards.inheritance}" == recessive_hom ]]; then - # Perform filtration for homozygous variant present in both parents - bcftools view \ - -i '(UNAFFECTED_CARRIERS == 2) && (BACKGROUND_CARRIERS == 0)' \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -elif [[ "{snakemake.wildcards.inheritance}" == recessive_hc ]]; then - if [[ -z "$father_no" ]] || [[ -z "$father_no" ]]; then - # Number of parents present in pedigree - parents=0 - [[ ! -z "$father_no" ]] && parents+=1 - [[ ! -z "$mother_no" ]] && parents+=1 - - # One or both parents missing, give up on being smart. - bcftools view \ - -i "(UNAFFECTED_CARRIERS == $parents) && (BACKGROUND_CARRIERS == 0)" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} - else - # Perform filtration for ARHC variants (2MEIs affecting the same tad, one from father and one - # from mother) - - # Create mother MEI file - exp_mother="(UNAFFECTED_CARRIERS == 1)" - exp_mother+=" && (BACKGROUND_CARRIERS == 0)" - exp_mother+=" && (GT[$mother_no] != \"0/0\")" - - bcftools view \ - -i "$exp_mother" \ - -O z \ - -o $TMPDIR/mother_meis.vcf.gz \ - {snakemake.input.vcf} - - # Create father MEI file - exp_father="(UNAFFECTED_CARRIERS == 1)" - exp_father+=" && (BACKGROUND_CARRIERS == 0)" - exp_father+=" && (GT[$father_no] != \"0/0\")" - - bcftools view \ - -i "$exp_father" \ - -O z \ - -o $TMPDIR/father_meis.vcf.gz \ - {snakemake.input.vcf} - - # Perform interval intersections to derive final AR het.comp. set of MEIs. 
- bedtools intersect -wa -a $tads_bed -b $TMPDIR/mother_meis.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/mother_meis.tads.bed - - bedtools intersect -wa -a $tads_bed -b $TMPDIR/father_meis.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/father_meis.tads.bed - - sort $TMPDIR/mother_meis.tads.bed \ - > $TMPDIR/mother_meis.tads.sorted.bed - - sort $TMPDIR/father_meis.tads.bed \ - > $TMPDIR/father_meis.tads.sorted.bed - - comm -12 $TMPDIR/mother_meis.tads.sorted.bed $TMPDIR/father_meis.tads.sorted.bed \ - > $TMPDIR/commom_tads.bed - - sort -k1,1 -k2,2n $TMPDIR/commom_tads.bed \ - > $TMPDIR/commom_tads.sorted.bed - - bedtools intersect -header -wa -a $TMPDIR/mother_meis.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.frommother.vcf - - bedtools intersect -wa -a $TMPDIR/father_meis.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.fromfather.vcf - - grep ^\# $TMPDIR/ARHC.frommother.vcf \ - > $TMPDIR/ARHC.vcf - - cat $TMPDIR/ARHC.frommother.vcf $TMPDIR/ARHC.fromfather.vcf \ - | {{ grep -v ^\# || true; }} \ - | sort -k1,1V -k2,2n \ - >> $TMPDIR/ARHC.vcf - - cat $TMPDIR/ARHC.vcf \ - | bgzip -c \ - > $TMPDIR/ARHC.vcf.gz - cp $TMPDIR/ARHC.vcf.gz {snakemake.output.vcf} - fi -else # else, "all" - cp {snakemake.input.vcf} {snakemake.output.vcf} -fi - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/environment.yaml b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/wrapper.py b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/wrapper.py deleted file mode 100644 index f12fa1310..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_quality/wrapper.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for quality filter for wgs_mei_filtration.""" - -# TODO: works for trios, singletons, or if only one parent available but NOT FOR MORE COMPLICATED CASES - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -shell( - r""" -set -x - -# Load library with helper functions. -source {base_dir}/../../wgs_sv_filtration/funcs.sh - -# Get name and number of index, father, and mother. 
-index={snakemake.wildcards.index_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Perform the actual filtration -lr_var={snakemake.config[step_config][wgs_mei_filtration][thresholds][conservative][lr_var]} -lr_ref={snakemake.config[step_config][wgs_mei_filtration][thresholds][conservative][lr_ref]} - -case "{snakemake.wildcards.thresholds}" in - conservative*) - # Build base filter expression for conservative case. - exp="(LR[${{index_no}}] >= $lr_var)" - - if [[ -n "$father_no" ]]; then - exp+="&& (" - exp+="(GT[$father_no] == \"alt\" && LR[$father_no] > $lr_var)" - exp+="|| (GT[$father_no] == \"ref\" && LR[$father_no] < $lr_ref)" - exp+=")" - fi - - if [[ -n "$mother_no" ]]; then - exp+="&& (" - exp+="(GT[$mother_no] == \"alt\" && LR[$mother_no] > $lr_var)" - exp+="|| (GT[$mother_no] == \"ref\" && LR[$mother_no] < $lr_ref)" - exp+=")" - fi - - bcftools view \ - -i "$exp" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} - ;; - *) - cp {snakemake.input.vcf} {snakemake.output.vcf} - ;; -esac - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/environment.yaml b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/wrapper.py b/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/wrapper.py deleted file mode 100644 index d5a90d10a..000000000 --- a/snappy_wrappers/wrappers/wgs_mei_filtration/filter_regions/wrapper.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper code for region filter for wgs_mei_filtration.""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -shell.executable("/bin/bash") - -if snakemake.wildcards.regions == "whole_genome": - path_bed = "/dev/null" -else: - path_bed = snakemake.config["step_config"]["wgs_mei_filtration"]["region_beds"][ - snakemake.wildcards.regions - ] - -shell( - r""" -set -x - -if [[ "{snakemake.wildcards.regions}" != whole_genome ]]; then - bedtools intersect -header -wa -a {snakemake.input.vcf} -b {path_bed} \ - | sort -k1,1V -k2,2n \ - | uniq \ - | bgzip -c \ - > {snakemake.output.vcf} -else # else, "all" - cp {snakemake.input.vcf} {snakemake.output.vcf} -fi - -tabix -f {snakemake.output.vcf} - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/environment.yaml b/snappy_wrappers/wrappers/wgs_sv_filtration/environment.yaml deleted file mode 100644 index f7d919074..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/environment.yaml +++ /dev/null @@ -1,8 +0,0 @@ -channels: - - conda-forge - - 
bioconda - - nodefaults -dependencies: - - bcftools==1.8 - - htslib==1.8 - - bedtools==2.27.0 diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/environment.yaml b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/wrapper.py b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/wrapper.py deleted file mode 100644 index 6364d9f23..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_inheritance/wrapper.py +++ /dev/null @@ -1,173 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for filtering WGS SV results for mode of inheritance.""" - -# TODO: works for trios, singletons but NOT FOR MORE COMPLICATED CASES -# TODO: how to make work if only one parent present? - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -shell( - r""" -set -x - -export TMPDIR=$(mktemp -d) -trap "rm -rf $TMPDIR" EXIT - -# Load library with helper functions. -source {base_dir}/../funcs.sh - -# Get name and number of index, father, and mother ------------------------------------------------ - -index={snakemake.wildcards.index_library} -father=$(awk '($2 == "R13_1-N1-DNA1-WGS1") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "R13_1-N1-DNA1-WGS1") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Definition of the filtration functions ---------------------------------------------------------- - -# Dominant/de novo/recessive hom can be solved with a simple filter expression. -simple_filter() {{ - bcftools view \ - -i "$1" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -}} - -# Recessive het. comp. in the case of only one parent -rec_hc_one_parent() {{ - # Number of parents present in pedigree - parents=0 - [[ ! -z "$1" ]] && parents+=1 - [[ ! -z "$2" ]] && parents+=1 - - # One or both parents missing, give up on being smart. - bcftools view \ - -i "(UNAFFECTED_CARRIERS == $parents) && (BACKGROUND_CARRIERS == 0)" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} -}} - -# Perform filtration for ARHC variants (2SVs affecting the same tad, one from father and one -# from mother) -rec_hc_two_parents() {{ - father_no=$1 - mother_no=$2 - - tads_bed={snakemake.config[step_config][wgs_sv_filtration][region_beds][all_tads]} - - # Create mother SV file - exp_mother="(UNAFFECTED_CARRIERS == 1)" - exp_mother+=" && (BACKGROUND_CARRIERS == 0)" - exp_mother+=" && (GT[$mother_no] != \"alt\")" - - bcftools view \ - -i "$exp_mother" \ - -O z \ - -o $TMPDIR/mother_SVs.vcf.gz \ - {snakemake.input.vcf} - - # Create father SV file - exp_father="(UNAFFECTED_CARRIERS == 1)" - exp_father+=" && (BACKGROUND_CARRIERS == 0)" - exp_father+=" && (GT[$father_no] == \"alt\")" - - bcftools view \ - -i "$exp_father" \ - -O z \ - -o $TMPDIR/father_SVs.vcf.gz \ - {snakemake.input.vcf} - - # Perform interval intersections to derive final AR het.comp. set of SVs. 
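The bedtools/comm pipeline that follows implements the compound-heterozygous test at TAD resolution: an SV is kept when it falls into a TAD that is also hit by a candidate SV inherited from the other parent. Reduced to set operations (purely illustrative, with TADs represented by identifiers instead of BED intervals):

# Set-based restatement of the TAD compound-het logic below (illustration only).
def compound_het_candidates(
    mother_svs: dict[str, set[str]],  # SV id -> TADs it overlaps (maternal-only candidates)
    father_svs: dict[str, set[str]],  # SV id -> TADs it overlaps (paternal-only candidates)
) -> set[str]:
    mother_tads = set().union(*mother_svs.values()) if mother_svs else set()
    father_tads = set().union(*father_svs.values()) if father_svs else set()
    shared_tads = mother_tads & father_tads  # corresponds to comm -12 on the sorted TAD lists
    # Keep every SV that touches at least one TAD hit from both sides.
    keep = {sv for sv, tads in mother_svs.items() if tads & shared_tads}
    keep |= {sv for sv, tads in father_svs.items() if tads & shared_tads}
    return keep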
- bedtools intersect -wa -a $tads_bed -b $TMPDIR/mother_SVs.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/mother_SVs.tads.bed - - bedtools intersect -wa -a $tads_bed -b $TMPDIR/father_SVs.vcf.gz \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/father_SVs.tads.bed - - sort $TMPDIR/mother_SVs.tads.bed \ - > $TMPDIR/mother_SVs.tads.sorted.bed - - sort $TMPDIR/father_SVs.tads.bed \ - > $TMPDIR/father_SVs.tads.sorted.bed - - comm -12 $TMPDIR/mother_SVs.tads.sorted.bed $TMPDIR/father_SVs.tads.sorted.bed \ - > $TMPDIR/commom_tads.bed - - sort -k1,1 -k2,2n $TMPDIR/commom_tads.bed \ - > $TMPDIR/commom_tads.sorted.bed - - bedtools intersect -header -wa -a $TMPDIR/mother_SVs.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.frommother.vcf - - bedtools intersect -header -wa -a $TMPDIR/father_SVs.vcf.gz -b $TMPDIR/commom_tads.sorted.bed \ - | sort -k1,1V -k2,2n \ - | uniq \ - > $TMPDIR/ARHC.fromfather.vcf - - bgzip $TMPDIR/ARHC.frommother.vcf - bgzip $TMPDIR/ARHC.fromfather.vcf - - tabix -f $TMPDIR/ARHC.frommother.vcf.gz - tabix -f $TMPDIR/ARHC.fromfather.vcf.gz - - bcftools concat -a -O z -o {snakemake.output.vcf} \ - $TMPDIR/ARHC.frommother.vcf.gz $TMPDIR/ARHC.fromfather.vcf.gz -}} - -# Actual filtration ------------------------------------------------------------------------------- - -## TODO: think whether to generate two files (father / mother) adapt the snakemake.wilcards.inheritance somewhere -## or one and filter for father/mother at the ARHC step - -case "{snakemake.wildcards.inheritance}" in - dominant) - # Perform filtration for variants dominantly transmitted by father or mother - simple_filter '(UNAFFECTED_CARRIERS == 1) && (BACKGROUND_CARRIERS == 0)' - ;; - de_novo) - # Perform filtration for de novo variants - simple_filter '(UNAFFECTED_CARRIERS == 0) && (BACKGROUND_CARRIERS == 0)' - ;; - recessive_hom) - # Perform filtration for homozygous variant present in both parents - simple_filter '(UNAFFECTED_CARRIERS == 2) && (BACKGROUND_CARRIERS == 0)' - ;; - recessive_hc) - if [[ -z "$father_no" ]] || [[ -z "$mother_no" ]]; then - rec_hc_one_parent "$father_no" "$mother_no" - else - rec_hc_two_parents "$father_no" "$mother_no" - fi - ;; - *) # else, "all" - cp {snakemake.input.vcf} {snakemake.output.vcf} -esac - -tabix -f {snakemake.output.vcf} - -# Compute checksums ------------------------------------------------------------------------------- - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/environment.yaml b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/wrapper.py b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/wrapper.py deleted file mode 100644 index 3eb279770..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_quality/wrapper.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for threshold-based filtration of WGS SV calls.""" - -# TODO: works for trios, singletons, or if only one parent available but NOT FOR MORE 
COMPLICATED CASES - -import os - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -base_dir = os.path.dirname(os.path.realpath(__file__)) - -thresholds = snakemake.config["step_config"]["wgs_sv_filtration"]["thresholds"][ - snakemake.wildcards.thresholds -] - -shell( - r""" -set -x - -# Load library with helper functions. -source {base_dir}/../funcs.sh - -# Get name and number of index, father, and mother ------------------------------------------------ - -index={snakemake.wildcards.index_library} -father=$(awk '($2 == "'$index'") {{ print $3; }}' {snakemake.input.ped}) -mother=$(awk '($2 == "'$index'") {{ print $4; }}' {snakemake.input.ped}) - -index_no=$(get_index {snakemake.input.vcf} "$index") -father_no=$(get_index {snakemake.input.vcf} "$father") -mother_no=$(get_index {snakemake.input.vcf} "$mother") - -# Actual filtration ------------------------------------------------------------------------------- - -case "{snakemake.wildcards.thresholds}" in - no_filter) - # Shortcut when we don't filter. - cp {snakemake.input.vcf} {snakemake.output.vcf} - cp {snakemake.input.vcf}.tbi {snakemake.output.vcf}.tbi - ;; - *) - # Build base filter expression for the index. - exp="(PE_AAF[${{index_no}}] >= {thresholds[min_pe_aaf_index]})" - exp+=" || (SR_AAF[${{index_no}}] >= {thresholds[min_sr_aaf_index]})" - # Extend filter expression if parents are present. - [[ ! -z "$mother_no" ]] && exp+=" | (SR_AAF[$mother_no] >= {thresholds[min_pe_aaf_parent]})" - [[ ! -z "$father_no" ]] && exp+=" | (SR_AAF[$father_no] >= {thresholds[min_pe_aaf_parent]})" - - # Perform filtration on PE and SR AAF using the expression built above. - bcftools view \ - -i "$exp" \ - -O z \ - -o {snakemake.output.vcf} \ - {snakemake.input.vcf} - - tabix -f {snakemake.output.vcf} - ;; -esac - -# Compute checksums ------------------------------------------------------------------------------- - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/environment.yaml b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/environment.yaml deleted file mode 120000 index 2e107ac86..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/environment.yaml +++ /dev/null @@ -1 +0,0 @@ -../environment.yaml \ No newline at end of file diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/wrapper.py b/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/wrapper.py deleted file mode 100644 index fd9ce0801..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/filter_regions/wrapper.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -"""CUBI+Snakemake wrapper for filtering WGS SV results for overlap with interesting regions.""" - -from snakemake import shell - -__author__ = "Manuel Holtgrewe " - -# shell.executable('/bin/bash') # XXX - -if snakemake.wildcards.regions == "whole_genome": - # We will not use path_bed below but need a value anyway. 
- path_bed = "/dev/null" -else: - path_bed = snakemake.config["step_config"]["wgs_sv_filtration"]["region_beds"][ - snakemake.wildcards.regions - ] - -shell( - r""" -set -x - -case "{snakemake.wildcards.regions}" in - whole_genome) - cp {snakemake.input.vcf} {snakemake.output.vcf} - cp {snakemake.input.vcf}.tbi {snakemake.output.vcf}.tbi - ;; - *) - bedtools intersect -header -wa -a {snakemake.input.vcf} -b {path_bed} \ - | sort -k1,1V -k2,2n \ - | uniq \ - | bgzip -c \ - > {snakemake.output.vcf} - tabix -f {snakemake.output.vcf} - ;; -esac - -pushd $(dirname {snakemake.output.vcf}) -md5sum $(basename {snakemake.output.vcf}) >$(basename {snakemake.output.vcf}).md5 -md5sum $(basename {snakemake.output.vcf_tbi}) >$(basename {snakemake.output.vcf_tbi}).md5 -""" -) diff --git a/snappy_wrappers/wrappers/wgs_sv_filtration/funcs.sh b/snappy_wrappers/wrappers/wgs_sv_filtration/funcs.sh deleted file mode 100644 index 2ad825b22..000000000 --- a/snappy_wrappers/wrappers/wgs_sv_filtration/funcs.sh +++ /dev/null @@ -1,54 +0,0 @@ -# Helper bash functions. - -# Function get_index() -------------------------------------------------------- -# -# numeric_index get_index(vcf_path, sample_name) -# -# Get (and print to stdout) the numeric index of $sample_name in the VCF file -# at $vcf_path. - -get_index() -{ - [[ "$#" -ne 2 ]] && return - - vcf=$1 - name=$2 - - pat=$( - bcftools view --header-only $vcf \ - | tail -n 1 \ - | cut -f 10- \ - | tr '\t' '|') - - set +o pipefail - bcftools view --header-only $vcf \ - | grep '^#CHROM' \ - | tr '\t' '\n' \ - | cat -n \ - | grep "\s$name$" \ - | egrep -w "$pat" \ - | awk '{ print $1 - 10; }' -} - -# Function samples_vcf_ped() -------------------------------------------------- -# -# samples samples_vcf_ped(vcf_path, ped_path) -# -# Get sample names (line by line) that are both in the PED and the VCF file. 
- -samples_vcf_ped() -{ - [[ "$#" -ne 2 ]] && return - - vcf=$1 - ped=$2 - - pat=$( - bcftools view --header-only $vcf \ - | tail -n 1 \ - | cut -f 10- \ - | tr '\t' '|') - - cut -f 2 $ped \ - | egrep -w "$pat" -} diff --git a/tests/snappy_pipeline/workflows/test_workflows_adapter_trimming.py b/tests/snappy_pipeline/workflows/test_workflows_adapter_trimming.py index 044bf0c3a..9c195ef87 100644 --- a/tests/snappy_pipeline/workflows/test_workflows_adapter_trimming.py +++ b/tests/snappy_pipeline/workflows/test_workflows_adapter_trimming.py @@ -137,6 +137,98 @@ def test_bbduk_step_part_get_args_input(adapter_trimming_workflow): }, }, }, + "config": { + "adapter_sequences": ["/path/to/adapter_sequences.fa"], + "num_threads": 8, + "interleaved": "auto", + "qin": "auto", + "copyundefined": False, + "nzo": True, + "qout": "auto", + "statscolumns": 3, + "rename": False, + "refnames": False, + "trd": False, + "ordered": False, + "gcbins": "auto", + "maxhistlen": 6000, + "histbefore": True, + "idbins": 100, + "k": 21, + "rcomp": True, + "maskmiddle": True, + "minkmerhits": 1, + "minkmerfraction": 0.0, + "mincovfraction": 0.0, + "hammingdistance": 1, + "qhdist": 0, + "editdistance": 0, + "hammingdistance2": 0, + "qhdist2": 0, + "editdistance2": 0, + "forbidn": False, + "removeifeitherbad": True, + "trimfailures": False, + "findbestmatch": False, + "skipr1": False, + "skipr2": False, + "ecco": False, + "ktrim": "r", + "kmask": "", + "maskfullycovered": False, + "ksplit": False, + "mink": 11, + "qtrim": "rl", + "trimq": 25, + "minlength": 35, + "mlf": 0, + "minavgquality": 0, + "maqb": 0, + "minbasequality": 0, + "maxns": -1, + "mcb": 0, + "ottm": False, + "tp": 0, + "tbo": False, + "strictoverlap": True, + "minoverlap": 14, + "mininsert": 40, + "tpe": False, + "forcetrimleft": 0, + "forcetrimright": 0, + "forcetrimright2": 0, + "forcetrimmod": 5, + "restrictleft": 0, + "restrictright": 0, + "mingc": 0.0, + "maxgc": 1.0, + "gcpairs": True, + "tossjunk": False, + "swift": False, + "chastityfilter": False, + "barcodefilter": "f", + "barcodes": "", + "xmin": -1, + "ymin": -1, + "xmax": -1, + "ymax": -1, + "trimpolya": 0, + "trimpolygleft": 0, + "trimpolygright": 8, + "trimpolyg": 0, + "filterpolyg": 8, + "entropy": -1.0, + "entropywindow": 50, + "entropyk": 5, + "minbasefrequency": 0.0, + "entropytrim": "f", + "entropymask": "f", + "entropymark": False, + "cardinality": False, + "cardinalityout": False, + "loglogk": 31, + "loglogbuckets": 2048, + }, } assert actual == expected @@ -210,6 +302,54 @@ def test_fastp_step_part_get_args_input(adapter_trimming_workflow): }, }, }, + "config": { + "num_threads": 4, + "trim_front1": 0, + "trim_tail1": 0, + "max_len1": 0, + "trim_front2": 0, + "trim_tail2": 0, + "max_len2": 0, + "dedup": False, + "dup_calc_accuracy": 0, + "dont_eval_duplication": True, + "trim_poly_g": True, + "poly_g_min_len": 8, + "trim_poly_x": False, + "poly_x_min_len": 10, + "cut_front": False, + "cut_tail": False, + "cut_right": False, + "cut_front_window_size": 4, + "cut_front_mean_quality": 20, + "cut_tail_window_size": 4, + "cut_tail_mean_quality": 20, + "cut_right_window_size": 4, + "cut_right_mean_quality": 20, + "disable_quality_filtering": False, + "qualified_quality_phred": 15, + "unqualified_percent_limit": 40, + "n_base_limit": 5, + "average_qual": 0, + "disable_length_filtering": False, + "length_required": 15, + "length_limit": 0, + "low_complexity_filter": False, + "complexity_threshold": 30, + "filter_by_index1": "", + "filter_by_index2": "", + "filter_by_index_threshold": 0, + 
"correction": False, + "overlap_len_require": 30, + "overlap_diff_limit": 5, + "overlap_diff_percent_limit": 20, + "umi": False, + "umi_loc": "", + "umi_len": 0, + "umi_prefix": "", + "umi_skip": 0, + "overrepresentation_analysis": False, + }, } assert actual == expected diff --git a/tests/snappy_pipeline/workflows/test_workflows_repeat_expansion.py b/tests/snappy_pipeline/workflows/test_workflows_repeat_expansion.py index 31f907bcf..91ddab130 100644 --- a/tests/snappy_pipeline/workflows/test_workflows_repeat_expansion.py +++ b/tests/snappy_pipeline/workflows/test_workflows_repeat_expansion.py @@ -116,7 +116,12 @@ def test_repeat_expansion_workflow_files(repeat_expansion_workflow): def test_expansionhunter_run_step_part_get_input_files(repeat_expansion_workflow): """Tests ExpansionHunterStepPart._get_input_files_run()""" # Define expected - expected = ["NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam"] + expected = { + "bam": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam", + "bai": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam.bai", + "reference": "/path/to/ref.fa", + "repeat_catalog": "DUMMY", + } # Get actual wildcards = Wildcards(fromdict={"mapper": "bwa", "library_name": "P001-N1-DNA1-WGS1"}) actual = repeat_expansion_workflow.get_input_files("expansionhunter", "run")(wildcards) diff --git a/tests/snappy_pipeline/workflows/test_workflows_somatic_variant_filtration.py b/tests/snappy_pipeline/workflows/test_workflows_somatic_variant_filtration.py index f2389dd72..83342a22e 100644 --- a/tests/snappy_pipeline/workflows/test_workflows_somatic_variant_filtration.py +++ b/tests/snappy_pipeline/workflows/test_workflows_somatic_variant_filtration.py @@ -36,7 +36,8 @@ def minimal_config(): tools_somatic_variant_calling: ['mutect2'] tools_somatic_variant_annotation: ['jannovar'] filtration_schema: sets - filter_sets: {} + filter_sets: + dkfz_and_ebfilter_and_oxog: {} path_somatic_variant: "../somatic_variant_annotation" data_sets: @@ -124,6 +125,13 @@ def test_dkfz_bias_filter_step_part_get_log_file(somatic_variant_filtration_work assert actual == expected +def test_dkfz_bias_filter_step_part_get_args(somatic_variant_filtration_workflow): + """Tests DkfzBiasFilterStepPart.get_log_file()""" + expected = {"reference": "/path/to/ref.fa"} + actual = somatic_variant_filtration_workflow.get_params("dkfz_bias_filter", "run") + assert actual == expected + + def test_dkfz_bias_filter_step_part_get_resource_usage(somatic_variant_filtration_workflow): """Tests DkfzBiasFilterStepPart.get_resource()""" # Define expected @@ -227,6 +235,23 @@ def test_eb_filter_step_part_get_log_file_write_panel(somatic_variant_filtration assert actual == expected +def test_eb_filter_step_part_get_params_run(somatic_variant_filtration_workflow): + """Tests EbFilterStepPart.get_params()""" + expected = { + "reference": "/path/to/ref.fa", + "ebfilter_threshold": 2.4, + "vaf_threshold": 0.08, + "coverage_threshold": 5, + "has_annotation": True, + "shuffle_seed": 1, + "panel_of_normals_size": 25, + "min_mapq": 20, + "min_baseq": 15, + } + actual = somatic_variant_filtration_workflow.get_params("eb_filter", "run") + assert actual == expected + + def test_eb_filter_step_part_get_resource_usage(somatic_variant_filtration_workflow): """Tests EbFilterStepPart.get_resource()""" # All actions @@ -565,15 +590,19 @@ def test_one_filter_step_part_get_log_file(somatic_variant_filtration_workflow_l assert actual == expected -def 
test_one_filter_step_part_get_params(somatic_variant_filtration_workflow_list): +def test_one_filter_step_part_get_args(somatic_variant_filtration_workflow_list): """Tests OneFilterStepPart.get_params()""" wildcards = Wildcards(fromdict={"filter_nb": 1}) - expected = {"filter_name": "dkfz_1"} - actual = somatic_variant_filtration_workflow_list.get_params("one_dkfz", "run")(wildcards) + expected = { + "reference": "/path/to/ref.fa", + "filter_name": "dkfz_1", + } + actual = somatic_variant_filtration_workflow_list.get_args("one_dkfz", "run")(wildcards) assert actual == expected wildcards = Wildcards(fromdict={"filter_nb": 2}) expected = { + "reference": "/path/to/ref.fa", "filter_name": "ebfilter_2", "ebfilter_threshold": 2.3, "has_annotation": True, @@ -582,22 +611,22 @@ def test_one_filter_step_part_get_params(somatic_variant_filtration_workflow_lis "min_mapq": 20, "min_baseq": 15, } - actual = somatic_variant_filtration_workflow_list.get_params("one_ebfilter", "run")(wildcards) + actual = somatic_variant_filtration_workflow_list.get_args("one_ebfilter", "run")(wildcards) assert actual == expected wildcards = Wildcards(fromdict={"filter_nb": 3}) - expected = {"filter_name": "bcftools_3", "include": "include_statment"} - actual = somatic_variant_filtration_workflow_list.get_params("one_bcftools", "run")(wildcards) + expected = {"filter_name": "bcftools_3", "include": "include_statment", "exclude": ""} + actual = somatic_variant_filtration_workflow_list.get_args("one_bcftools", "run")(wildcards) assert actual == expected wildcards = Wildcards(fromdict={"filter_nb": 4}) - expected = {"filter_name": "regions_4", "exclude": "/path/to/regions.bed"} - actual = somatic_variant_filtration_workflow_list.get_params("one_regions", "run")(wildcards) + expected = {"filter_name": "regions_4", "exclude": "/path/to/regions.bed", "include": "", "path_bed": ""} + actual = somatic_variant_filtration_workflow_list.get_args("one_regions", "run")(wildcards) assert actual == expected wildcards = Wildcards(fromdict={"filter_nb": 5}) expected = {"filter_name": "protected_5", "path_bed": "/path/to/protected.bed"} - actual = somatic_variant_filtration_workflow_list.get_params("one_protected", "run")(wildcards) + actual = somatic_variant_filtration_workflow_list.get_args("one_protected", "run")(wildcards) assert actual == expected diff --git a/tests/snappy_pipeline/workflows/test_workflows_sv_calling_wgs.py b/tests/snappy_pipeline/workflows/test_workflows_sv_calling_wgs.py index 8b924b2f9..54d3ff508 100644 --- a/tests/snappy_pipeline/workflows/test_workflows_sv_calling_wgs.py +++ b/tests/snappy_pipeline/workflows/test_workflows_sv_calling_wgs.py @@ -9,7 +9,11 @@ from snappy_pipeline.workflows.sv_calling_wgs import SvCallingWgsWorkflow -from .common import get_expected_output_bcf_files_dict +from .common import ( + get_expected_output_bcf_files_dict, + get_expected_output_vcf_files_dict, + get_expected_log_files_dict, +) from .conftest import patch_module_fs __author__ = "Manuel Holtgrewe " @@ -115,6 +119,7 @@ def test_delly2_step_part_get_input_files_call(sv_calling_wgs_workflow): ngs_mapping_path = "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/" expected = { "bam": ngs_mapping_path + "bwa.P001-N1-DNA1-WGS1.bam", + "bai": ngs_mapping_path + "bwa.P001-N1-DNA1-WGS1.bam.bai", } actual = sv_calling_wgs_workflow.get_input_files("delly2", "call")(wildcards) assert actual == expected @@ -128,123 +133,126 @@ def test_delly2_step_part_get_output_files_call(sv_calling_wgs_workflow): assert actual == expected -# def 
test_delly2_step_part_get_log_file_call(sv_calling_wgs_workflow): -# """Tests Delly2StepPart.get_log_file() - call""" -# base_name = "work/{mapper}.delly2_call.{library_name}/log/{mapper}.delly2_call.{library_name}" -# expected = get_expected_log_files_dict(base_out=base_name) -# actual = sv_calling_wgs_workflow.get_log_file("delly2", "call") -# assert actual == expected - - -# # Tests for Delly2StepPart (merge_calls) ------------------ - - -# def test_delly2_step_part_get_input_files_merge_calls(sv_calling_wgs_workflow): -# """Tests Delly2StepPart._get_input_files_merge_calls()""" -# wildcards = Wildcards(fromdict={"mapper": "bwa", "index_ngs_library": "P001-N1-DNA1-WGS1"}) -# base_name = ( -# "work/bwa.delly2_call.P00{i}-N1-DNA1-WGS1/out/bwa.delly2_call.P00{i}-N1-DNA1-WGS1.bcf" -# ) -# expected = [base_name.format(i=i) for i in (1, 2, 3)] -# actual = sv_calling_wgs_workflow.get_input_files("delly2", "merge_calls")(wildcards) -# assert actual == expected - - -# def test_delly2_step_part_get_output_files_merge_calls(sv_calling_wgs_workflow): -# """Tests Delly2StepPart.get_output_files() - merge_calls""" -# base_name_out = ( -# r"work/{mapper,[^\.]+}.delly2_merge_calls.{index_ngs_library,[^\.]+}/out/" -# r"{mapper}.delly2_merge_calls.{index_ngs_library}" -# ) -# expected = get_expected_output_bcf_files_dict(base_out=base_name_out) -# actual = sv_calling_wgs_workflow.get_output_files("delly2", "merge_calls") -# assert actual == expected +def test_delly2_step_part_get_log_file_call(sv_calling_wgs_workflow): + """Tests Delly2StepPart.get_log_file() - call""" + base_name = "work/{mapper}.delly2_call.{library_name}/log/{mapper}.delly2_call.{library_name}.sv_calling" + expected = get_expected_log_files_dict(base_out=base_name, extended=True) + actual = sv_calling_wgs_workflow.get_log_file("delly2", "call") + assert actual == expected -# def test_delly2_step_part_get_log_file_merge_calls(sv_calling_wgs_workflow): -# """Tests Delly2StepPart.get_log_file() - merge_calls""" -# base_name = ( -# "work/{mapper}.delly2_merge_calls.{index_ngs_library}/log/" -# "{mapper}.delly2_merge_calls.{index_ngs_library}" -# ) -# expected = get_expected_log_files_dict(base_out=base_name) -# actual = sv_calling_wgs_workflow.get_log_file("delly2", "merge_calls") -# assert actual == expected +def test_delly2_step_part_get_input_files_merge_calls(sv_calling_wgs_workflow): + """Tests Delly2StepPart._get_input_files_merge_calls()""" + wildcards = Wildcards(fromdict={"mapper": "bwa", "library_name": "P001-N1-DNA1-WGS1"}) + base_name = ( + "work/bwa.delly2_call.P00{i}-N1-DNA1-WGS1/out/bwa.delly2_call.P00{i}-N1-DNA1-WGS1.bcf" + ) + expected = {"bcf": [base_name.format(i=i) for i in (1, 2, 3)]} + actual = sv_calling_wgs_workflow.get_input_files("delly2", "merge_calls")(wildcards) + assert actual == expected -# # Tests for Delly2StepPart (genotype) ------------------ +def test_delly2_step_part_get_output_files_merge_calls(sv_calling_wgs_workflow): + """Tests Delly2StepPart.get_output_files() - merge_calls""" + base_name_out = ( + r"work/{mapper}.delly2_merge_calls.{library_name}/out/" + r"{mapper}.delly2_merge_calls.{library_name}" + ) + expected = get_expected_output_bcf_files_dict(base_out=base_name_out) + actual = sv_calling_wgs_workflow.get_output_files("delly2", "merge_calls") + assert actual == expected -# def test_delly2_step_part_get_input_files_genotype(sv_calling_wgs_workflow): -# """Tests Delly2StepPart._get_input_files_genotype()""" -# wildcards = Wildcards(fromdict={"mapper": "bwa", "library_name": 
"P001-N1-DNA1-WGS1"}) -# actual = sv_calling_wgs_workflow.get_input_files("delly2", "genotype")(wildcards) -# expected = { -# "bai": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam.bai", -# "bam": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam", -# "bcf": ( -# "work/bwa.delly2_merge_calls.P001-N1-DNA1-WGS1/out/" -# "bwa.delly2_merge_calls.P001-N1-DNA1-WGS1.bcf" -# ), -# } -# assert actual == expected +def test_delly2_step_part_get_log_file_merge_calls(sv_calling_wgs_workflow): + """Tests Delly2StepPart.get_log_file() - merge_calls""" + base_name = ( + "work/{mapper}.delly2_merge_calls.{library_name}/log/" + "{mapper}.delly2_merge_calls.{library_name}.sv_calling" + ) + expected = get_expected_log_files_dict(base_out=base_name, extended=True) + actual = sv_calling_wgs_workflow.get_log_file("delly2", "merge_calls") + assert actual == expected -# def test_delly2_step_part_get_output_files_genotype(sv_calling_wgs_workflow): -# """Tests Delly2StepPart._get_output_files_genotype()""" -# base_name = ( -# "work/{mapper}.delly2_genotype.{library_name}/out/{mapper}.delly2_genotype.{library_name}" -# ) -# expected = get_expected_output_bcf_files_dict(base_out=base_name) -# actual = sv_calling_wgs_workflow.get_output_files("delly2", "genotype") -# assert actual == expected +def test_delly2_step_part_get_input_files_genotype(sv_calling_wgs_workflow): + """Tests Delly2StepPart._get_input_files_genotype()""" + wildcards = Wildcards(fromdict={"mapper": "bwa", "library_name": "P001-N1-DNA1-WGS1"}) + actual = sv_calling_wgs_workflow.get_input_files("delly2", "genotype")(wildcards) + expected = { + "bai": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam.bai", + "bam": "NGS_MAPPING/output/bwa.P001-N1-DNA1-WGS1/out/bwa.P001-N1-DNA1-WGS1.bam", + "bcf": ( + "work/bwa.delly2_merge_calls.P001-N1-DNA1-WGS1/out/" + "bwa.delly2_merge_calls.P001-N1-DNA1-WGS1.bcf" + ), + } + assert actual == expected -# def test_delly2_step_part_get_log_file_genotype(sv_calling_wgs_workflow): -# """Tests Delly2StepPart.get_log_file() - genotype""" -# base_name = ( -# "work/{mapper}.delly2_genotype.{library_name}/log/{mapper}.delly2_genotype.{library_name}" -# ) -# expected = get_expected_log_files_dict(base_out=base_name) -# actual = sv_calling_wgs_workflow.get_log_file("delly2", "genotype") -# assert actual == expected +def test_delly2_step_part_get_output_files_genotype(sv_calling_wgs_workflow): + """Tests Delly2StepPart._get_output_files_genotype()""" + base_name = ( + "work/{mapper}.delly2_genotype.{library_name}/out/{mapper}.delly2_genotype.{library_name}" + ) + expected = get_expected_output_bcf_files_dict(base_out=base_name) + actual = sv_calling_wgs_workflow.get_output_files("delly2", "genotype") + assert actual == expected -# # Tests for Delly2StepPart (merge_genotypes) ------------------ +def test_delly2_step_part_get_log_file_genotype(sv_calling_wgs_workflow): + """Tests Delly2StepPart.get_log_file() - genotype""" + base_name = ( + "work/{mapper}.delly2_genotype.{library_name}/log/{mapper}.delly2_genotype.{library_name}.sv_calling" + ) + expected = get_expected_log_files_dict(base_out=base_name, extended=True) + actual = sv_calling_wgs_workflow.get_log_file("delly2", "genotype") + assert actual == expected -# def test_delly2_step_part_get_input_files_merge_genotypes(sv_calling_wgs_workflow): -# """Tests Delly2StepPart._get_input_files_merge_genotypes()""" -# wildcards = Wildcards(fromdict={"mapper": "bwa", "index_ngs_library": "P001-N1-DNA1-WGS1"}) -# 
base_name = ( -# "work/bwa.delly2_genotype.P00{i}-N1-DNA1-WGS1/out/" -# "bwa.delly2_genotype.P00{i}-N1-DNA1-WGS1.bcf" -# ) -# expected = [base_name.format(i=i) for i in (1, 2, 3)] -# actual = sv_calling_wgs_workflow.get_input_files("delly2", "merge_genotypes")(wildcards) -# assert actual == expected +def test_delly2_step_part_get_input_files_merge_genotypes(sv_calling_wgs_workflow): + """Tests Delly2StepPart._get_input_files_merge_genotypes()""" + wildcards = Wildcards(fromdict={"mapper": "bwa", "library_name": "P001-N1-DNA1-WGS1"}) + base_name = ( + "work/bwa.delly2_genotype.P00{i}-N1-DNA1-WGS1/out/" + "bwa.delly2_genotype.P00{i}-N1-DNA1-WGS1.bcf" + ) + expected = {"bcf": [base_name.format(i=i) for i in (1, 2, 3)]} + actual = sv_calling_wgs_workflow.get_input_files("delly2", "merge_genotypes")(wildcards) + assert actual == expected -# def test_delly2_step_part_get_output_files_merge_genotypes(sv_calling_wgs_workflow): -# """Tests Delly2StepPart._get_output_files_merge_genotypes()""" -# base_name_out = ( -# r"work/{mapper}.delly2_merge_genotypes.{index_ngs_library}/out/" -# r"{mapper}.delly2_merge_genotypes.{index_ngs_library}" -# ) -# expected = get_expected_output_bcf_files_dict(base_out=base_name_out) -# actual = sv_calling_wgs_workflow.get_output_files("delly2", "merge_genotypes") -# assert actual == expected +def test_delly2_step_part_get_output_files_merge_genotypes(sv_calling_wgs_workflow): + """Tests Delly2StepPart._get_output_files_merge_genotypes()""" + base_name_out = ( + r"work/{mapper}.delly2.{library_name}/out/{mapper}.delly2.{library_name}" + ) + vcfs = get_expected_output_vcf_files_dict(base_out=base_name_out) + output = [x.replace("work/", "output/") for x in vcfs.values()] + logs = [ + x.replace("work/", "output/") + for x in sv_calling_wgs_workflow.get_log_file("delly2", "merge_genotypes").values() + ] + expected = vcfs | {"output_links": output + logs} + actual = sv_calling_wgs_workflow.get_output_files("delly2", "merge_genotypes") + assert actual == expected -# def test_delly2_step_part_get_log_file_merge_genotypes(sv_calling_wgs_workflow): -# """Tests Delly2StepPart.get_log_file() - merge_genotypes""" -# base_name = ( -# "work/{mapper}.delly2_merge_genotypes.{index_ngs_library}/log/" -# "{mapper}.delly2_merge_genotypes.{index_ngs_library}" -# ) -# expected = get_expected_log_files_dict(base_out=base_name) -# actual = sv_calling_wgs_workflow.get_log_file("delly2", "merge_genotypes") -# assert actual == expected +def test_delly2_step_part_get_args(sv_calling_wgs_workflow): + """Tests Delly2StepPart.get_args() - all actions""" + expected = { + "genome": "/path/to/ref.fa", + "config": { + "path_exclude_tsv": None, + "map_qual": 1, + "geno_qual": 5, + "qual_tra": 20, + "mad_cutoff": 9, + "skip_libraries": [], + }, + } + for action in ("call", "merge_calls", "genotype", "merge_genotypes"): + actual = sv_calling_wgs_workflow.get_args("delly2", action) + assert actual == expected # # Global RunGcnvWgsStepPart Tests ------------------------------------------------------------------ diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 2c2244641..000000000 --- a/uv.lock +++ /dev/null @@ -1,1748 +0,0 @@ -version = 1 -requires-python = ">=3.12" - -[[package]] -name = "alabaster" -version = "0.7.16" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = 
"sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511 }, -] - -[[package]] -name = "altamisa" -version = "0.2.9" -source = { git = "https://github.com/bihealth/altamisa.git?rev=817dc491ff819e4c80686082bf3e5f602f1ac14c#817dc491ff819e4c80686082bf3e5f602f1ac14c" } -dependencies = [ - { name = "attrs" }, - { name = "typer" }, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "appdirs" -version = "1.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/d8/05696357e0311f5b5c316d7b95f46c669dd9c15aaeecbb48c7d0aeb88c40/appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", size = 13470 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566 }, -] - -[[package]] -name = "attrs" -version = "24.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, -] - -[[package]] -name = "babel" -version = "2.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, -] - -[[package]] -name = "biomedsheets" -version = "0.11.7" -source = { git = "https://github.com/bihealth/biomedsheets.git?rev=4e0a8484850c39d1511036c3fe29ec0b4f9271f8#4e0a8484850c39d1511036c3fe29ec0b4f9271f8" } -dependencies = [ - { name = "jsonpath-rw" }, - { name = "jsonschema" }, - { name = "requests" }, - { name = "requests-file" }, - { name = "ruamel-yaml" }, -] - -[[package]] -name = "black" -version = "24.10.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, - { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, - { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, - { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, - { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, - { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, - { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, - { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, - { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, -] - 
-[[package]] -name = "cfgv" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, - { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, - { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, - { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, - { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, - { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, - { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, - { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, - { url = 
"https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, - { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, - { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, - { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, - { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, - { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, - { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, - { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, - { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, - { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, - { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, - { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, - { url = 
"https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, - { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, - { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, - { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, - { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, - { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, - { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, - { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, - { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, - { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, - { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, -] - -[[package]] -name = "click" -version = "8.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 
336121 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "configargparse" -version = "1.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/8a/73f1008adfad01cb923255b924b1528727b8270e67cb4ef41eabdc7d783e/ConfigArgParse-1.7.tar.gz", hash = "sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1", size = 43817 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/b3/b4ac838711fd74a2b4e6f746703cf9dd2cf5462d17dac07e349234e21b97/ConfigArgParse-1.7-py3-none-any.whl", hash = "sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b", size = 25489 }, -] - -[[package]] -name = "connection-pool" -version = "0.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/df/c9b4e25dce00f6349fd28aadba7b6c3f7431cc8bd4308a158fbe57b6a22e/connection_pool-0.0.3.tar.gz", hash = "sha256:bf429e7aef65921c69b4ed48f3d48d3eac1383b05d2df91884705842d974d0dc", size = 3795 } - -[[package]] -name = "contourpy" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/25/c2/fc7193cc5383637ff390a712e88e4ded0452c9fbcf84abe3de5ea3df1866/contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699", size = 13465753 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/6b/175f60227d3e7f5f1549fcb374592be311293132207e451c3d7c654c25fb/contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509", size = 271494 }, - { url = "https://files.pythonhosted.org/packages/6b/6a/7833cfae2c1e63d1d8875a50fd23371394f540ce809d7383550681a1fa64/contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc", size = 255444 }, - { url = "https://files.pythonhosted.org/packages/7f/b3/7859efce66eaca5c14ba7619791b084ed02d868d76b928ff56890d2d059d/contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454", size = 307628 }, - { url = "https://files.pythonhosted.org/packages/48/b2/011415f5e3f0a50b1e285a0bf78eb5d92a4df000553570f0851b6e309076/contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80", size = 347271 }, - { url = 
"https://files.pythonhosted.org/packages/84/7d/ef19b1db0f45b151ac78c65127235239a8cf21a59d1ce8507ce03e89a30b/contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec", size = 318906 }, - { url = "https://files.pythonhosted.org/packages/ba/99/6794142b90b853a9155316c8f470d2e4821fe6f086b03e372aca848227dd/contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9", size = 323622 }, - { url = "https://files.pythonhosted.org/packages/3c/0f/37d2c84a900cd8eb54e105f4fa9aebd275e14e266736778bb5dccbf3bbbb/contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b", size = 1266699 }, - { url = "https://files.pythonhosted.org/packages/3a/8a/deb5e11dc7d9cc8f0f9c8b29d4f062203f3af230ba83c30a6b161a6effc9/contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d", size = 1326395 }, - { url = "https://files.pythonhosted.org/packages/1a/35/7e267ae7c13aaf12322ccc493531f1e7f2eb8fba2927b9d7a05ff615df7a/contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e", size = 175354 }, - { url = "https://files.pythonhosted.org/packages/a1/35/c2de8823211d07e8a79ab018ef03960716c5dff6f4d5bff5af87fd682992/contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d", size = 220971 }, - { url = "https://files.pythonhosted.org/packages/9a/e7/de62050dce687c5e96f946a93546910bc67e483fe05324439e329ff36105/contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2", size = 271548 }, - { url = "https://files.pythonhosted.org/packages/78/4d/c2a09ae014ae984c6bdd29c11e74d3121b25eaa117eca0bb76340efd7e1c/contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5", size = 255576 }, - { url = "https://files.pythonhosted.org/packages/ab/8a/915380ee96a5638bda80cd061ccb8e666bfdccea38d5741cb69e6dbd61fc/contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81", size = 306635 }, - { url = "https://files.pythonhosted.org/packages/29/5c/c83ce09375428298acd4e6582aeb68b1e0d1447f877fa993d9bf6cd3b0a0/contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2", size = 345925 }, - { url = "https://files.pythonhosted.org/packages/29/63/5b52f4a15e80c66c8078a641a3bfacd6e07106835682454647aca1afc852/contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7", size = 318000 }, - { url = "https://files.pythonhosted.org/packages/9a/e2/30ca086c692691129849198659bf0556d72a757fe2769eb9620a27169296/contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c", size = 322689 }, - { url = "https://files.pythonhosted.org/packages/6b/77/f37812ef700f1f185d348394debf33f22d531e714cf6a35d13d68a7003c7/contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3", size = 1268413 }, - { url = "https://files.pythonhosted.org/packages/3f/6d/ce84e79cdd128542ebeb268f84abb4b093af78e7f8ec504676673d2675bc/contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1", size = 1326530 }, - { url = "https://files.pythonhosted.org/packages/72/22/8282f4eae20c73c89bee7a82a19c4e27af9b57bb602ecaa00713d5bdb54d/contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82", size = 175315 }, - { url = "https://files.pythonhosted.org/packages/e3/d5/28bca491f65312b438fbf076589dcde7f6f966b196d900777f5811b9c4e2/contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd", size = 220987 }, - { url = "https://files.pythonhosted.org/packages/2f/24/a4b285d6adaaf9746e4700932f579f1a7b6f9681109f694cfa233ae75c4e/contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30", size = 285001 }, - { url = "https://files.pythonhosted.org/packages/48/1d/fb49a401b5ca4f06ccf467cd6c4f1fd65767e63c21322b29b04ec40b40b9/contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751", size = 268553 }, - { url = "https://files.pythonhosted.org/packages/79/1e/4aef9470d13fd029087388fae750dccb49a50c012a6c8d1d634295caa644/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342", size = 310386 }, - { url = "https://files.pythonhosted.org/packages/b0/34/910dc706ed70153b60392b5305c708c9810d425bde12499c9184a1100888/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c", size = 349806 }, - { url = "https://files.pythonhosted.org/packages/31/3c/faee6a40d66d7f2a87f7102236bf4780c57990dd7f98e5ff29881b1b1344/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f", size = 321108 }, - { url = "https://files.pythonhosted.org/packages/17/69/390dc9b20dd4bb20585651d7316cc3054b7d4a7b4f8b710b2b698e08968d/contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda", size = 327291 }, - { url = "https://files.pythonhosted.org/packages/ef/74/7030b67c4e941fe1e5424a3d988080e83568030ce0355f7c9fc556455b01/contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242", size = 1263752 }, - { url = "https://files.pythonhosted.org/packages/f0/ed/92d86f183a8615f13f6b9cbfc5d4298a509d6ce433432e21da838b4b63f4/contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1", size = 1318403 }, - { url = "https://files.pythonhosted.org/packages/b3/0e/c8e4950c77dcfc897c71d61e56690a0a9df39543d2164040301b5df8e67b/contourpy-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1", size = 185117 }, - { url = 
"https://files.pythonhosted.org/packages/c1/31/1ae946f11dfbd229222e6d6ad8e7bd1891d3d48bde5fbf7a0beb9491f8e3/contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546", size = 236668 }, -] - -[[package]] -name = "coverage" -version = "7.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/05/31553dc038667012853d0a248b57987d8d70b2d67ea885605f87bcb1baba/coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353", size = 793238 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/96/58bcb3417c2fd38fae862704599f7088451bb6c8786f5cec6887366e78d9/coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233", size = 205392 }, - { url = "https://files.pythonhosted.org/packages/2c/63/4f781db529b585a6ef3860ea01390951b006dbea9ada4ea3a3d830e325f4/coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747", size = 205634 }, - { url = "https://files.pythonhosted.org/packages/57/50/c5aadf036078072f31d8f1ae1a6000cc70f3f6cf652939c2d77551174d77/coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638", size = 238754 }, - { url = "https://files.pythonhosted.org/packages/eb/a6/57c42994b1686461c7b0b29de3b6d3d60c5f23a656f96460f9c755a31506/coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e", size = 235783 }, - { url = "https://files.pythonhosted.org/packages/88/52/7054710a881b09d295e93b9889ac204c241a6847a8c05555fc6e1d8799d5/coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555", size = 237865 }, - { url = "https://files.pythonhosted.org/packages/a0/c3/57ef08c70483b83feb4e0d22345010aaf0afbe442dba015da3b173076c36/coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f", size = 237340 }, - { url = "https://files.pythonhosted.org/packages/d8/44/465fa8f8edc11a18cbb83673f29b1af20ccf5139a66fbe2768ff67527ff0/coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c", size = 235663 }, - { url = "https://files.pythonhosted.org/packages/ef/e5/829ddcfb29ad41661ba8e9cac7dc52100fd2c4853bb93d668a3ebde64862/coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805", size = 237309 }, - { url = "https://files.pythonhosted.org/packages/98/f6/f9c96fbf9b36be3f4d8c252ab2b4944420d99425f235f492784498804182/coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b", size = 207988 }, - { url = "https://files.pythonhosted.org/packages/0e/c1/2b7c7dcf4c273aac7676f12fb2b5524b133671d731ab91bd9a41c21675b9/coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7", size = 208756 }, -] - -[[package]] -name = "coveralls" -version = "4.0.1" -source = { registry = "https://pypi.org/simple" } 
-dependencies = [ - { name = "coverage" }, - { name = "docopt" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/75/a454fb443eb6a053833f61603a432ffbd7dd6ae53a11159bacfadb9d6219/coveralls-4.0.1.tar.gz", hash = "sha256:7b2a0a2bcef94f295e3cf28dcc55ca40b71c77d1c2446b538e85f0f7bc21aa69", size = 12419 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/63/e5/6708c75e2a4cfca929302d4d9b53b862c6dc65bd75e6933ea3d20016d41d/coveralls-4.0.1-py3-none-any.whl", hash = "sha256:7a6b1fa9848332c7b2221afb20f3df90272ac0167060f41b5fe90429b30b1809", size = 13599 }, -] - -[[package]] -name = "cycler" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 }, -] - -[[package]] -name = "datrie" -version = "0.8.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/fe/db74bd405d515f06657f11ad529878fd389576dca4812bea6f98d9b31574/datrie-0.8.2.tar.gz", hash = "sha256:525b08f638d5cf6115df6ccd818e5a01298cd230b2dac91c8ff2e6499d18765d", size = 63278 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/02/53f0cf0bf0cd629ba6c2cc13f2f9db24323459e9c19463783d890a540a96/datrie-0.8.2-pp273-pypy_73-win32.whl", hash = "sha256:b07bd5fdfc3399a6dab86d6e35c72b1dbd598e80c97509c7c7518ab8774d3fda", size = 91292 }, -] - -[[package]] -name = "decorator" -version = "5.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, -] - -[[package]] -name = "distlib" -version = "0.3.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, -] - -[[package]] -name = "docopt" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491", size = 25901 } - -[[package]] -name = "docutils" -version = "0.20.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/1f/53/a5da4f2c5739cf66290fac1431ee52aff6851c7c8ffd8264f13affd7bcdd/docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b", size = 2058365 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", size = 572666 }, -] - -[[package]] -name = "dpath" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/ce/e1fd64d36e4a5717bd5e6b2ad188f5eaa2e902fde871ea73a79875793fc9/dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e", size = 28266 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/d1/8952806fbf9583004ab479d8f58a9496c3d35f6b6009ddd458bdd9978eaf/dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576", size = 17618 }, -] - -[[package]] -name = "fasteners" -version = "0.17.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/f4/148f44998c1bdb064a508e7cbcf9e50b34572b3d36fcc378a5d61b7dc8c5/fasteners-0.17.3.tar.gz", hash = "sha256:a9a42a208573d4074c77d041447336cf4e3c1389a256fd3e113ef59cf29b7980", size = 18014 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/01/274da83334c20dc1ae7a48b1ea4ae50d3571d4e6aea65bb0368f841701ad/fasteners-0.17.3-py3-none-any.whl", hash = "sha256:cae0772df265923e71435cc5057840138f4e8b6302f888a567d06ed8e1cbca03", size = 18441 }, -] - -[[package]] -name = "fastjsonschema" -version = "2.21.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924 }, -] - -[[package]] -name = "filelock" -version = "3.16.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, -] - -[[package]] -name = "fonttools" -version = "4.55.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/3a/6ab28db8f90c99e6b502436fb642912b590c352d5ba83e0b22b46db209da/fonttools-4.55.2.tar.gz", hash = "sha256:45947e7b3f9673f91df125d375eb57b9a23f2a603f438a1aebf3171bffa7a205", size = 3492954 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/62/7ac990a52c2bb249e9de6de0036a24eba5a5a8e8446819ab5a5751a0a45e/fonttools-4.55.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:18f082445b8fe5e91c53e6184f4c1c73f3f965c8bcc614c6cd6effd573ce6c1a", size = 2754521 }, - { url = "https://files.pythonhosted.org/packages/4a/bd/a8034bf5d685f825cec0aca6759639277b1d3b0b1d38842b5f30edfb4176/fonttools-4.55.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c0f91adbbd706e8acd1db73e3e510118e62d0ffb651864567dccc5b2339f90", size = 2287092 }, - { url = "https://files.pythonhosted.org/packages/70/ad/edf4f4e0efdda8205893007d30d62da09f92d3f0b0f1a3faf85bd5df9952/fonttools-4.55.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8ccce035320d63dba0c35f52499322f5531dbe85bba1514c7cea26297e4c54", size = 4782490 }, - { url = "https://files.pythonhosted.org/packages/7a/5f/f757e5860cc4f187fdf8eacf53abc92613cdbc55355e13ba07e2c937d217/fonttools-4.55.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96e126df9615df214ec7f04bebcf60076297fbc10b75c777ce58b702d7708ffb", size = 4854787 }, - { url = "https://files.pythonhosted.org/packages/92/1b/c647b89e5603f9ae9b8f14885dfaf523351eb9d0b5dcbafaf1512d0d4d97/fonttools-4.55.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:508ebb42956a7a931c4092dfa2d9b4ffd4f94cea09b8211199090d2bd082506b", size = 4763330 }, - { url = "https://files.pythonhosted.org/packages/57/09/117e2b5b2d2fcd607b360e241939a652505577c752f9ca15b2fb9e4fc540/fonttools-4.55.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1b9de46ef7b683d50400abf9f1578eaceee271ff51c36bf4b7366f2be29f498", size = 4990999 }, - { url = "https://files.pythonhosted.org/packages/b9/e5/9be5bd4bfb83187fb83f46b9be6676f653c08a430b975e0a3355fd248c37/fonttools-4.55.2-cp312-cp312-win32.whl", hash = "sha256:2df61d9fc15199cc86dad29f64dd686874a3a52dda0c2d8597d21f509f95c332", size = 2151234 }, - { url = "https://files.pythonhosted.org/packages/f3/c5/0eda5db19bd5fe3f6b8dc30ca5be512999b4923268b9b82fd14c211217b5/fonttools-4.55.2-cp312-cp312-win_amd64.whl", hash = "sha256:d337ec087da8216a828574aa0525d869df0a2ac217a2efc1890974ddd1fbc5b9", size = 2198133 }, - { url = "https://files.pythonhosted.org/packages/2d/94/f941fa68a1d4a0f2facd5e6476ae91c5683aea7b7cc30d3ef49187cbbc67/fonttools-4.55.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:10aff204e2edee1d312fa595c06f201adf8d528a3b659cfb34cd47eceaaa6a26", size = 2741975 }, - { url = "https://files.pythonhosted.org/packages/c5/61/00015fe3ccc8171b4d4afb0fa5155064f68948138ef5e1a5ac9cb49082c4/fonttools-4.55.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09fe922a3eff181fd07dd724cdb441fb6b9fc355fd1c0f1aa79aca60faf1fbdd", size = 2280680 }, - { url = "https://files.pythonhosted.org/packages/4e/fe/9bb6cedc47a9e3872e138e4328475d4ff4faea7d87a2316dc5e5e4cd305e/fonttools-4.55.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e1e8b524143a799bda0169c48b44a23a6027c1bb1957d5a172a7d3a1dd704", size = 4760147 }, - { url = "https://files.pythonhosted.org/packages/a2/3a/5bbe1b2a01f6bdf911aca48941eb317a678b50fccf63a27298289af79023/fonttools-4.55.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1726872e09268bbedb14dc02e58b7ea31ecdd1204c6073eda4911746b44797", size = 4834697 }, - { url = "https://files.pythonhosted.org/packages/43/21/6bb77d4c90e0333db2f5059476fe2f74ad706d9117e82508756c78c7b9be/fonttools-4.55.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fc88cfb58b0cd7b48718c3e61dd0d0a3ee8e2c86b973342967ce09fbf1db6d4", size = 4743076 }, - { url = 
"https://files.pythonhosted.org/packages/90/0b/21392ffe6f9ffb1eefd06363401c68815434faed22cebf00337f513ee41f/fonttools-4.55.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e857fe1859901ad8c5cab32e0eebc920adb09f413d2d73b74b677cf47b28590c", size = 4965579 }, - { url = "https://files.pythonhosted.org/packages/f6/c8/c5aed715d3816977451d2eaf4ab3aaad48e8d8a3e25d28a4d29a07b0f841/fonttools-4.55.2-cp313-cp313-win32.whl", hash = "sha256:81ccd2b3a420b8050c7d9db3be0555d71662973b3ef2a1d921a2880b58957db8", size = 2149187 }, - { url = "https://files.pythonhosted.org/packages/c4/07/36df0ee4ba78b8eb4880b8bbc0d96cc97b98d358ff4a74b469bda851f63e/fonttools-4.55.2-cp313-cp313-win_amd64.whl", hash = "sha256:d559eb1744c7dcfa90ae60cb1a4b3595e898e48f4198738c321468c01180cd83", size = 2195113 }, - { url = "https://files.pythonhosted.org/packages/69/94/c4d8dfe26a971e00e34df99b46e9518425f59918c8993830e904171e21f9/fonttools-4.55.2-py3-none-any.whl", hash = "sha256:8e2d89fbe9b08d96e22c7a81ec04a4e8d8439c31223e2dc6f2f9fc8ff14bdf9f", size = 1100792 }, -] - -[[package]] -name = "gitdb" -version = "4.0.11" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "smmap" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/0d/bbb5b5ee188dec84647a4664f3e11b06ade2bde568dbd489d9d64adef8ed/gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b", size = 394469 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/5b/8f0c4a5bb9fd491c277c21eff7ccae71b47d43c4446c9d0c6cff2fe8c2c4/gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4", size = 62721 }, -] - -[[package]] -name = "gitpython" -version = "3.1.43" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "gitdb" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b6/a1/106fd9fa2dd989b6fb36e5893961f82992cf676381707253e0bf93eb1662/GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c", size = 214149 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/bd/cc3a402a6439c15c3d4294333e13042b915bbeab54edc457c723931fed3f/GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff", size = 207337 }, -] - -[[package]] -name = "humanfriendly" -version = "10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyreadline3", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, -] - -[[package]] -name = "identify" -version = "2.6.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1a/5f/05f0d167be94585d502b4adf8c7af31f1dc0b1c7e14f9938a88fdbbcf4a7/identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02", size = 99179 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c9/f5/09644a3ad803fae9eca8efa17e1f2aef380c7f0b02f7ec4e8d446e51d64a/identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd", size = 99049 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, -] - -[[package]] -name = "jinja2" -version = "3.1.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, -] - -[[package]] -name = "jsonpath-rw" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "decorator" }, - { name = "ply" }, - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/7c/45001b1f19af8c4478489fbae4fc657b21c4c669d7a5a036a86882581d85/jsonpath-rw-1.4.0.tar.gz", hash = "sha256:05c471281c45ae113f6103d1268ec7a4831a2e96aa80de45edc89b11fac4fbec", size = 13814 } - -[[package]] -name = "jsonschema" -version = "4.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "pyrsistent" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/26/67/36cfd516f7b3560bbf7183d7a0f82bb9514d2a5f4e1d682a8a1d55d8031d/jsonschema-4.4.0.tar.gz", hash = 
"sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83", size = 313519 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/b2/2c4af6a97c3f12c6d5a72b41d328c3996e14e1e46701df3fac1ed65119c9/jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823", size = 72687 }, -] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "platformdirs" }, - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 }, -] - -[[package]] -name = "kiwisolver" -version = "1.4.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/85/4d/2255e1c76304cbd60b48cee302b66d1dde4468dc5b1160e4b7cb43778f2a/kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60", size = 97286 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/97/9c/0a11c714cf8b6ef91001c8212c4ef207f772dd84540104952c45c1f0a249/kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2", size = 121808 }, - { url = "https://files.pythonhosted.org/packages/f2/d8/0fe8c5f5d35878ddd135f44f2af0e4e1d379e1c7b0716f97cdcb88d4fd27/kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a", size = 65531 }, - { url = "https://files.pythonhosted.org/packages/80/c5/57fa58276dfdfa612241d640a64ca2f76adc6ffcebdbd135b4ef60095098/kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee", size = 63894 }, - { url = "https://files.pythonhosted.org/packages/8b/e9/26d3edd4c4ad1c5b891d8747a4f81b1b0aba9fb9721de6600a4adc09773b/kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640", size = 1369296 }, - { url = "https://files.pythonhosted.org/packages/b6/67/3f4850b5e6cffb75ec40577ddf54f7b82b15269cc5097ff2e968ee32ea7d/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f", size = 1461450 }, - { url = "https://files.pythonhosted.org/packages/52/be/86cbb9c9a315e98a8dc6b1d23c43cffd91d97d49318854f9c37b0e41cd68/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483", size = 1579168 }, - { url = "https://files.pythonhosted.org/packages/0f/00/65061acf64bd5fd34c1f4ae53f20b43b0a017a541f242a60b135b9d1e301/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258", size 
= 1507308 }, - { url = "https://files.pythonhosted.org/packages/21/e4/c0b6746fd2eb62fe702118b3ca0cb384ce95e1261cfada58ff693aeec08a/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e", size = 1464186 }, - { url = "https://files.pythonhosted.org/packages/0a/0f/529d0a9fffb4d514f2782c829b0b4b371f7f441d61aa55f1de1c614c4ef3/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107", size = 2247877 }, - { url = "https://files.pythonhosted.org/packages/d1/e1/66603ad779258843036d45adcbe1af0d1a889a07af4635f8b4ec7dccda35/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948", size = 2404204 }, - { url = "https://files.pythonhosted.org/packages/8d/61/de5fb1ca7ad1f9ab7970e340a5b833d735df24689047de6ae71ab9d8d0e7/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038", size = 2352461 }, - { url = "https://files.pythonhosted.org/packages/ba/d2/0edc00a852e369827f7e05fd008275f550353f1f9bcd55db9363d779fc63/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383", size = 2501358 }, - { url = "https://files.pythonhosted.org/packages/84/15/adc15a483506aec6986c01fb7f237c3aec4d9ed4ac10b756e98a76835933/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520", size = 2314119 }, - { url = "https://files.pythonhosted.org/packages/36/08/3a5bb2c53c89660863a5aa1ee236912269f2af8762af04a2e11df851d7b2/kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b", size = 46367 }, - { url = "https://files.pythonhosted.org/packages/19/93/c05f0a6d825c643779fc3c70876bff1ac221f0e31e6f701f0e9578690d70/kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb", size = 55884 }, - { url = "https://files.pythonhosted.org/packages/d2/f9/3828d8f21b6de4279f0667fb50a9f5215e6fe57d5ec0d61905914f5b6099/kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a", size = 48528 }, - { url = "https://files.pythonhosted.org/packages/c4/06/7da99b04259b0f18b557a4effd1b9c901a747f7fdd84cf834ccf520cb0b2/kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e", size = 121913 }, - { url = "https://files.pythonhosted.org/packages/97/f5/b8a370d1aa593c17882af0a6f6755aaecd643640c0ed72dcfd2eafc388b9/kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6", size = 65627 }, - { url = "https://files.pythonhosted.org/packages/2a/fc/6c0374f7503522539e2d4d1b497f5ebad3f8ed07ab51aed2af988dd0fb65/kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750", size = 63888 }, - { url = "https://files.pythonhosted.org/packages/bf/3e/0b7172793d0f41cae5c923492da89a2ffcd1adf764c16159ca047463ebd3/kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d", size = 1369145 }, - { url = "https://files.pythonhosted.org/packages/77/92/47d050d6f6aced2d634258123f2688fbfef8ded3c5baf2c79d94d91f1f58/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379", size = 1461448 }, - { url = "https://files.pythonhosted.org/packages/9c/1b/8f80b18e20b3b294546a1adb41701e79ae21915f4175f311a90d042301cf/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c", size = 1578750 }, - { url = "https://files.pythonhosted.org/packages/a4/fe/fe8e72f3be0a844f257cadd72689c0848c6d5c51bc1d60429e2d14ad776e/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34", size = 1507175 }, - { url = "https://files.pythonhosted.org/packages/39/fa/cdc0b6105d90eadc3bee525fecc9179e2b41e1ce0293caaf49cb631a6aaf/kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1", size = 1463963 }, - { url = "https://files.pythonhosted.org/packages/6e/5c/0c03c4e542720c6177d4f408e56d1c8315899db72d46261a4e15b8b33a41/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f", size = 2248220 }, - { url = "https://files.pythonhosted.org/packages/3d/ee/55ef86d5a574f4e767df7da3a3a7ff4954c996e12d4fbe9c408170cd7dcc/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b", size = 2404463 }, - { url = "https://files.pythonhosted.org/packages/0f/6d/73ad36170b4bff4825dc588acf4f3e6319cb97cd1fb3eb04d9faa6b6f212/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27", size = 2352842 }, - { url = "https://files.pythonhosted.org/packages/0b/16/fa531ff9199d3b6473bb4d0f47416cdb08d556c03b8bc1cccf04e756b56d/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a", size = 2501635 }, - { url = "https://files.pythonhosted.org/packages/78/7e/aa9422e78419db0cbe75fb86d8e72b433818f2e62e2e394992d23d23a583/kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee", size = 2314556 }, - { url = "https://files.pythonhosted.org/packages/a8/b2/15f7f556df0a6e5b3772a1e076a9d9f6c538ce5f05bd590eca8106508e06/kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07", size = 46364 }, - { url = "https://files.pythonhosted.org/packages/0b/db/32e897e43a330eee8e4770bfd2737a9584b23e33587a0812b8e20aac38f7/kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76", size = 55887 }, - { url = "https://files.pythonhosted.org/packages/c8/a4/df2bdca5270ca85fd25253049eb6708d4127be2ed0e5c2650217450b59e9/kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650", size = 48530 }, -] - -[[package]] -name = "logzero" -version = "1.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { 
name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bc/9a/018883ee64df0900bde1ac314868f81d12cbc450e51b216ab55e6e4dfc7d/logzero-1.7.0.tar.gz", hash = "sha256:7f73ddd3ae393457236f081ffebd044a3aa2e423a47ae6ddb5179ab90d0ad082", size = 577803 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/68/aa714515d65090fcbcc9a1f3debd5a644b14aad11e59238f42f00bd4b298/logzero-1.7.0-py2.py3-none-any.whl", hash = "sha256:23eb1f717a2736f9ab91ca0d43160fd2c996ad49ae6bad34652d47aba908769d", size = 16162 }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, -] - -[[package]] -name = "matplotlib" -version = "3.8.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "contourpy" }, - { name = "cycler" }, - { name = "fonttools" }, - { name = "kiwisolver" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pillow" }, - { name = "pyparsing" }, - { name = "python-dateutil" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/38/4f/8487737a74d8be4ab5fbe6019b0fae305c1604cf7209500969b879b5f462/matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea", size = 35934425 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/eb/65f3bd78ce757dadd455c220273349428384b162485cd8aa380b61a867ed/matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616", size = 7604083 }, - { url = "https://files.pythonhosted.org/packages/da/2b/2bb6073ca8d336da07ace7d98bf7bb9da8233f55876bb3db6a5ee924f3e9/matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732", size = 7496013 }, - { url = "https://files.pythonhosted.org/packages/61/cd/976d3a9c10328da1d2fe183f7c92c45f1e125536226a6eb3a820c4753cd1/matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb", size = 11376749 }, - { url = "https://files.pythonhosted.org/packages/cd/ba/412149958e951876096198609b958b90a8a2c9bc07a96eeeaa9e2c480f30/matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30", size = 11600837 }, - { url = "https://files.pythonhosted.org/packages/dc/4f/e5b56ca109d8ab6bae37f519f15b891fc18809ddb8bc1aa26e0bfca83e25/matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25", size = 9538883 }, - { url = "https://files.pythonhosted.org/packages/7d/ca/e7bd1876a341ed8c456095962a582696cac1691cb6e55bd5ead15a755c5d/matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a", size = 7659712 }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, -] - -[[package]] -name = "mistune" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/c8/f0173fe3bf85fd891aee2e7bcd8207dfe26c2c683d727c5a6cc3aec7b628/mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8", size = 90840 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/74/c95adcdf032956d9ef6c89a9b8a5152bf73915f8c633f3e3d88d06bd699c/mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205", size = 47958 }, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, -] - -[[package]] -name = "nbformat" -version = "5.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fastjsonschema" }, - { name = "jsonschema" }, - { name = "jupyter-core" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454 }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, -] - -[[package]] -name = "numpy" -version = "2.1.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/25/ca/1166b75c21abd1da445b97bf1fa2f14f423c6cfb4fc7c4ef31dccf9f6a94/numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761", size = 20166090 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/f0/385eb9970309643cbca4fc6eebc8bb16e560de129c91258dfaa18498da8b/numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e", size = 20849658 }, - { url = "https://files.pythonhosted.org/packages/54/4a/765b4607f0fecbb239638d610d04ec0a0ded9b4951c56dc68cef79026abf/numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958", size = 13492258 }, - { url = "https://files.pythonhosted.org/packages/bd/a7/2332679479c70b68dccbf4a8eb9c9b5ee383164b161bee9284ac141fbd33/numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8", size = 5090249 }, - { url = "https://files.pythonhosted.org/packages/c1/67/4aa00316b3b981a822c7a239d3a8135be2a6945d1fd11d0efb25d361711a/numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564", size = 6621704 }, - { url = "https://files.pythonhosted.org/packages/5e/da/1a429ae58b3b6c364eeec93bf044c532f2ff7b48a52e41050896cf15d5b1/numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512", size = 13606089 }, - { url = "https://files.pythonhosted.org/packages/9e/3e/3757f304c704f2f0294a6b8340fcf2be244038be07da4cccf390fa678a9f/numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b", size = 16043185 }, - { url = "https://files.pythonhosted.org/packages/43/97/75329c28fea3113d00c8d2daf9bc5828d58d78ed661d8e05e234f86f0f6d/numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc", size = 16410751 }, - { url = "https://files.pythonhosted.org/packages/ad/7a/442965e98b34e0ae9da319f075b387bcb9a1e0658276cc63adb8c9686f7b/numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0", size = 14082705 }, - { url = "https://files.pythonhosted.org/packages/ac/b6/26108cf2cfa5c7e03fb969b595c93131eab4a399762b51ce9ebec2332e80/numpy-2.1.3-cp312-cp312-win32.whl", hash = "sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9", size = 6239077 }, - { url = "https://files.pythonhosted.org/packages/a6/84/fa11dad3404b7634aaab50733581ce11e5350383311ea7a7010f464c0170/numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a", size = 12566858 }, - { url = "https://files.pythonhosted.org/packages/4d/0b/620591441457e25f3404c8057eb924d04f161244cb8a3680d529419aa86e/numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f", size = 20836263 }, - { url = "https://files.pythonhosted.org/packages/45/e1/210b2d8b31ce9119145433e6ea78046e30771de3fe353f313b2778142f34/numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598", size = 13507771 }, - { url = "https://files.pythonhosted.org/packages/55/44/aa9ee3caee02fa5a45f2c3b95cafe59c44e4b278fbbf895a93e88b308555/numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57", size = 5075805 }, - { url = "https://files.pythonhosted.org/packages/78/d6/61de6e7e31915ba4d87bbe1ae859e83e6582ea14c6add07c8f7eefd8488f/numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe", size = 6608380 }, - { url = "https://files.pythonhosted.org/packages/3e/46/48bdf9b7241e317e6cf94276fe11ba673c06d1fdf115d8b4ebf616affd1a/numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43", size = 13602451 }, - { url = "https://files.pythonhosted.org/packages/70/50/73f9a5aa0810cdccda9c1d20be3cbe4a4d6ea6bfd6931464a44c95eef731/numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56", size = 16039822 }, - { url = "https://files.pythonhosted.org/packages/ad/cd/098bc1d5a5bc5307cfc65ee9369d0ca658ed88fbd7307b0d49fab6ca5fa5/numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a", size = 16411822 }, - { url = "https://files.pythonhosted.org/packages/83/a2/7d4467a2a6d984549053b37945620209e702cf96a8bc658bc04bba13c9e2/numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef", size = 14079598 }, - { url = "https://files.pythonhosted.org/packages/e9/6a/d64514dcecb2ee70bfdfad10c42b76cab657e7ee31944ff7a600f141d9e9/numpy-2.1.3-cp313-cp313-win32.whl", hash = 
"sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f", size = 6236021 }, - { url = "https://files.pythonhosted.org/packages/bb/f9/12297ed8d8301a401e7d8eb6b418d32547f1d700ed3c038d325a605421a4/numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed", size = 12560405 }, - { url = "https://files.pythonhosted.org/packages/a7/45/7f9244cd792e163b334e3a7f02dff1239d2890b6f37ebf9e82cbe17debc0/numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f", size = 20859062 }, - { url = "https://files.pythonhosted.org/packages/b1/b4/a084218e7e92b506d634105b13e27a3a6645312b93e1c699cc9025adb0e1/numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4", size = 13515839 }, - { url = "https://files.pythonhosted.org/packages/27/45/58ed3f88028dcf80e6ea580311dc3edefdd94248f5770deb980500ef85dd/numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e", size = 5116031 }, - { url = "https://files.pythonhosted.org/packages/37/a8/eb689432eb977d83229094b58b0f53249d2209742f7de529c49d61a124a0/numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0", size = 6629977 }, - { url = "https://files.pythonhosted.org/packages/42/a3/5355ad51ac73c23334c7caaed01adadfda49544f646fcbfbb4331deb267b/numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408", size = 13575951 }, - { url = "https://files.pythonhosted.org/packages/c4/70/ea9646d203104e647988cb7d7279f135257a6b7e3354ea6c56f8bafdb095/numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6", size = 16022655 }, - { url = "https://files.pythonhosted.org/packages/14/ce/7fc0612903e91ff9d0b3f2eda4e18ef9904814afcae5b0f08edb7f637883/numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f", size = 16399902 }, - { url = "https://files.pythonhosted.org/packages/ef/62/1d3204313357591c913c32132a28f09a26357e33ea3c4e2fe81269e0dca1/numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17", size = 14067180 }, - { url = "https://files.pythonhosted.org/packages/24/d7/78a40ed1d80e23a774cb8a34ae8a9493ba1b4271dde96e56ccdbab1620ef/numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48", size = 6291907 }, - { url = "https://files.pythonhosted.org/packages/86/09/a5ab407bd7f5f5599e6a9261f964ace03a73e7c6928de906981c31c38082/numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4", size = 12644098 }, -] - -[[package]] -name = "packaging" -version = "24.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, -] - -[[package]] -name = "pillow" -version = "11.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/a3/26e606ff0b2daaf120543e537311fa3ae2eb6bf061490e4fea51771540be/pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923", size = 3147642 }, - { url = "https://files.pythonhosted.org/packages/4f/d5/1caabedd8863526a6cfa44ee7a833bd97f945dc1d56824d6d76e11731939/pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903", size = 2978999 }, - { url = "https://files.pythonhosted.org/packages/d9/ff/5a45000826a1aa1ac6874b3ec5a856474821a1b59d838c4f6ce2ee518fe9/pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4", size = 4196794 }, - { url = "https://files.pythonhosted.org/packages/9d/21/84c9f287d17180f26263b5f5c8fb201de0f88b1afddf8a2597a5c9fe787f/pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f", size = 4300762 }, - { url = "https://files.pythonhosted.org/packages/84/39/63fb87cd07cc541438b448b1fed467c4d687ad18aa786a7f8e67b255d1aa/pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9", size = 4210468 }, - { url = "https://files.pythonhosted.org/packages/7f/42/6e0f2c2d5c60f499aa29be14f860dd4539de322cd8fb84ee01553493fb4d/pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7", size = 4381824 }, - { url = "https://files.pythonhosted.org/packages/31/69/1ef0fb9d2f8d2d114db982b78ca4eeb9db9a29f7477821e160b8c1253f67/pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6", size = 4296436 }, - { url = "https://files.pythonhosted.org/packages/44/ea/dad2818c675c44f6012289a7c4f46068c548768bc6c7f4e8c4ae5bbbc811/pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc", size = 4429714 }, - { url = 
"https://files.pythonhosted.org/packages/af/3a/da80224a6eb15bba7a0dcb2346e2b686bb9bf98378c0b4353cd88e62b171/pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6", size = 2249631 }, - { url = "https://files.pythonhosted.org/packages/57/97/73f756c338c1d86bb802ee88c3cab015ad7ce4b838f8a24f16b676b1ac7c/pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47", size = 2567533 }, - { url = "https://files.pythonhosted.org/packages/0b/30/2b61876e2722374558b871dfbfcbe4e406626d63f4f6ed92e9c8e24cac37/pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25", size = 2254890 }, - { url = "https://files.pythonhosted.org/packages/63/24/e2e15e392d00fcf4215907465d8ec2a2f23bcec1481a8ebe4ae760459995/pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699", size = 3147300 }, - { url = "https://files.pythonhosted.org/packages/43/72/92ad4afaa2afc233dc44184adff289c2e77e8cd916b3ddb72ac69495bda3/pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38", size = 2978742 }, - { url = "https://files.pythonhosted.org/packages/9e/da/c8d69c5bc85d72a8523fe862f05ababdc52c0a755cfe3d362656bb86552b/pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2", size = 4194349 }, - { url = "https://files.pythonhosted.org/packages/cd/e8/686d0caeed6b998351d57796496a70185376ed9c8ec7d99e1d19ad591fc6/pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2", size = 4298714 }, - { url = "https://files.pythonhosted.org/packages/ec/da/430015cec620d622f06854be67fd2f6721f52fc17fca8ac34b32e2d60739/pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527", size = 4208514 }, - { url = "https://files.pythonhosted.org/packages/44/ae/7e4f6662a9b1cb5f92b9cc9cab8321c381ffbee309210940e57432a4063a/pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa", size = 4380055 }, - { url = "https://files.pythonhosted.org/packages/74/d5/1a807779ac8a0eeed57f2b92a3c32ea1b696e6140c15bd42eaf908a261cd/pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f", size = 4296751 }, - { url = "https://files.pythonhosted.org/packages/38/8c/5fa3385163ee7080bc13026d59656267daaaaf3c728c233d530e2c2757c8/pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb", size = 4430378 }, - { url = "https://files.pythonhosted.org/packages/ca/1d/ad9c14811133977ff87035bf426875b93097fb50af747793f013979facdb/pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798", size = 2249588 }, - { url = "https://files.pythonhosted.org/packages/fb/01/3755ba287dac715e6afdb333cb1f6d69740a7475220b4637b5ce3d78cec2/pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de", size = 2567509 }, - { url = 
"https://files.pythonhosted.org/packages/c0/98/2c7d727079b6be1aba82d195767d35fcc2d32204c7a5820f822df5330152/pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84", size = 2254791 }, - { url = "https://files.pythonhosted.org/packages/eb/38/998b04cc6f474e78b563716b20eecf42a2fa16a84589d23c8898e64b0ffd/pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b", size = 3150854 }, - { url = "https://files.pythonhosted.org/packages/13/8e/be23a96292113c6cb26b2aa3c8b3681ec62b44ed5c2bd0b258bd59503d3c/pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003", size = 2982369 }, - { url = "https://files.pythonhosted.org/packages/97/8a/3db4eaabb7a2ae8203cd3a332a005e4aba00067fc514aaaf3e9721be31f1/pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2", size = 4333703 }, - { url = "https://files.pythonhosted.org/packages/28/ac/629ffc84ff67b9228fe87a97272ab125bbd4dc462745f35f192d37b822f1/pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a", size = 4412550 }, - { url = "https://files.pythonhosted.org/packages/d6/07/a505921d36bb2df6868806eaf56ef58699c16c388e378b0dcdb6e5b2fb36/pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8", size = 4461038 }, - { url = "https://files.pythonhosted.org/packages/d6/b9/fb620dd47fc7cc9678af8f8bd8c772034ca4977237049287e99dda360b66/pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8", size = 2253197 }, - { url = "https://files.pythonhosted.org/packages/df/86/25dde85c06c89d7fc5db17940f07aae0a56ac69aa9ccb5eb0f09798862a8/pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904", size = 2572169 }, - { url = "https://files.pythonhosted.org/packages/51/85/9c33f2517add612e17f3381aee7c4072779130c634921a756c97bc29fb49/pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3", size = 2256828 }, -] - -[[package]] -name = "plac" -version = "1.4.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/79/1edb4c836c69306d0ecb0865f46d62ea7e28ef16b3f95bb394e4f2a46330/plac-1.4.3.tar.gz", hash = "sha256:d4cb3387b2113a28aebd509433d0264a4e5d9bb7c1a86db4fbd0a8f11af74eb3", size = 38984 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/af/4c61d2ac0d589719f548f5a1ba919738e44bac7b0c723ce147de5556d233/plac-1.4.3-py2.py3-none-any.whl", hash = "sha256:8a84fde8f950c9de6588a2d53c9deeac3ba1ddb456d887a33228460cf6549750", size = 22458 }, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = 
"sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, -] - -[[package]] -name = "ply" -version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567 }, -] - -[[package]] -name = "polyleven" -version = "0.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fb/ef/4076b8183b6d69e9cc4c9997952d0a4f118892b7e2b66340d8d8f4127972/polyleven-0.8.tar.gz", hash = "sha256:73099c4d93d1a55ce9f2017e941ae4dd1528e853140c090dd6233d078ebe75c8", size = 6373 } - -[[package]] -name = "pre-commit" -version = "4.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv" }, - { name = "identify" }, - { name = "nodeenv" }, - { name = "pyyaml" }, - { name = "virtualenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, -] - -[[package]] -name = "psutil" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, - { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, - { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, - { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, - { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, - { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, - { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, -] - -[[package]] -name = "pulp" -version = "2.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/e0/683a36567b0a396961192dc9ec477ba1f88be56d968ca26688bd6e02f23b/PuLP-2.8.0.tar.gz", hash = "sha256:4903bf96110bbab8ed2c68533f90565ebb76aa367d9e4df38e51bf727927c125", size = 17610412 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/09/d7/57e71e11108203039c895643368c0d1a99fe719a6a80184edf240c33d25f/PuLP-2.8.0-py3-none-any.whl", hash = "sha256:4a19814a5b0a4392d788ac2315263435293579b0583c3469943fe0c6a586f263", size = 17678623 }, -] - -[[package]] -name = "pydantic" -version = "2.10.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/45/0f/27908242621b14e649a84e62b133de45f84c255eecb350ab02979844a788/pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9", size = 786486 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/51/72c18c55cf2f46ff4f91ebcc8f75aa30f7305f3d726be3f4ebffb4ae972b/pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d", size = 456997 }, -] - -[[package]] -name = "pydantic-core" -version = "2.27.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a6/9f/7de1f19b6aea45aeb441838782d68352e71bfa98ee6fa048d5041991b33e/pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235", size = 412785 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/51/2e9b3788feb2aebff2aa9dfbf060ec739b38c05c46847601134cc1fed2ea/pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f", size = 1895239 }, - { url = "https://files.pythonhosted.org/packages/7b/9e/f8063952e4a7d0127f5d1181addef9377505dcce3be224263b25c4f0bfd9/pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02", size = 1805070 }, 
- { url = "https://files.pythonhosted.org/packages/2c/9d/e1d6c4561d262b52e41b17a7ef8301e2ba80b61e32e94520271029feb5d8/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c", size = 1828096 }, - { url = "https://files.pythonhosted.org/packages/be/65/80ff46de4266560baa4332ae3181fffc4488ea7d37282da1a62d10ab89a4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac", size = 1857708 }, - { url = "https://files.pythonhosted.org/packages/d5/ca/3370074ad758b04d9562b12ecdb088597f4d9d13893a48a583fb47682cdf/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb", size = 2037751 }, - { url = "https://files.pythonhosted.org/packages/b1/e2/4ab72d93367194317b99d051947c071aef6e3eb95f7553eaa4208ecf9ba4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529", size = 2733863 }, - { url = "https://files.pythonhosted.org/packages/8a/c6/8ae0831bf77f356bb73127ce5a95fe115b10f820ea480abbd72d3cc7ccf3/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35", size = 2161161 }, - { url = "https://files.pythonhosted.org/packages/f1/f4/b2fe73241da2429400fc27ddeaa43e35562f96cf5b67499b2de52b528cad/pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089", size = 1993294 }, - { url = "https://files.pythonhosted.org/packages/77/29/4bb008823a7f4cc05828198153f9753b3bd4c104d93b8e0b1bfe4e187540/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381", size = 2001468 }, - { url = "https://files.pythonhosted.org/packages/f2/a9/0eaceeba41b9fad851a4107e0cf999a34ae8f0d0d1f829e2574f3d8897b0/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb", size = 2091413 }, - { url = "https://files.pythonhosted.org/packages/d8/36/eb8697729725bc610fd73940f0d860d791dc2ad557faaefcbb3edbd2b349/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae", size = 2154735 }, - { url = "https://files.pythonhosted.org/packages/52/e5/4f0fbd5c5995cc70d3afed1b5c754055bb67908f55b5cb8000f7112749bf/pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c", size = 1833633 }, - { url = "https://files.pythonhosted.org/packages/ee/f2/c61486eee27cae5ac781305658779b4a6b45f9cc9d02c90cb21b940e82cc/pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16", size = 1986973 }, - { url = "https://files.pythonhosted.org/packages/df/a6/e3f12ff25f250b02f7c51be89a294689d175ac76e1096c32bf278f29ca1e/pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e", size = 1883215 }, - { url = 
"https://files.pythonhosted.org/packages/0f/d6/91cb99a3c59d7b072bded9959fbeab0a9613d5a4935773c0801f1764c156/pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073", size = 1895033 }, - { url = "https://files.pythonhosted.org/packages/07/42/d35033f81a28b27dedcade9e967e8a40981a765795c9ebae2045bcef05d3/pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08", size = 1807542 }, - { url = "https://files.pythonhosted.org/packages/41/c2/491b59e222ec7e72236e512108ecad532c7f4391a14e971c963f624f7569/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf", size = 1827854 }, - { url = "https://files.pythonhosted.org/packages/e3/f3/363652651779113189cefdbbb619b7b07b7a67ebb6840325117cc8cc3460/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737", size = 1857389 }, - { url = "https://files.pythonhosted.org/packages/5f/97/be804aed6b479af5a945daec7538d8bf358d668bdadde4c7888a2506bdfb/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2", size = 2037934 }, - { url = "https://files.pythonhosted.org/packages/42/01/295f0bd4abf58902917e342ddfe5f76cf66ffabfc57c2e23c7681a1a1197/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107", size = 2735176 }, - { url = "https://files.pythonhosted.org/packages/9d/a0/cd8e9c940ead89cc37812a1a9f310fef59ba2f0b22b4e417d84ab09fa970/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51", size = 2160720 }, - { url = "https://files.pythonhosted.org/packages/73/ae/9d0980e286627e0aeca4c352a60bd760331622c12d576e5ea4441ac7e15e/pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a", size = 1992972 }, - { url = "https://files.pythonhosted.org/packages/bf/ba/ae4480bc0292d54b85cfb954e9d6bd226982949f8316338677d56541b85f/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc", size = 2001477 }, - { url = "https://files.pythonhosted.org/packages/55/b7/e26adf48c2f943092ce54ae14c3c08d0d221ad34ce80b18a50de8ed2cba8/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960", size = 2091186 }, - { url = "https://files.pythonhosted.org/packages/ba/cc/8491fff5b608b3862eb36e7d29d36a1af1c945463ca4c5040bf46cc73f40/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23", size = 2154429 }, - { url = "https://files.pythonhosted.org/packages/78/d8/c080592d80edd3441ab7f88f865f51dae94a157fc64283c680e9f32cf6da/pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05", size = 1833713 }, - { url = 
"https://files.pythonhosted.org/packages/83/84/5ab82a9ee2538ac95a66e51f6838d6aba6e0a03a42aa185ad2fe404a4e8f/pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337", size = 1987897 }, - { url = "https://files.pythonhosted.org/packages/df/c3/b15fb833926d91d982fde29c0624c9f225da743c7af801dace0d4e187e71/pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5", size = 1882983 }, -] - -[[package]] -name = "pyfakefs" -version = "5.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b4/c1/9aba92f9fecb2b81a83efd516d028c6ddcbd3cebdfb3ecba42b9c858f5e8/pyfakefs-5.5.0.tar.gz", hash = "sha256:7448aaa07142f892d0a4eb52a5ed3206a9f02c6599e686cd97d624c18979c154", size = 205510 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/63/844e40b064d9b31c62e35029f034737929931c254a6b20312f36b14ee0e9/pyfakefs-5.5.0-py3-none-any.whl", hash = "sha256:8dbf203ab7bef1529f11f7d41b9478b898e95bf9f3b71262163aac07a518cd76", size = 219967 }, -] - -[[package]] -name = "pygments" -version = "2.18.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, -] - -[[package]] -name = "pyparsing" -version = "3.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/e5aeee5387091148a19e1145f63606619cb5f20b83fccb63efae6474e7b2/pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c", size = 920984 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/ec/2eb3cd785efd67806c46c13a17339708ddc346cbb684eade7a6e6f79536a/pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", size = 106921 }, -] - -[[package]] -name = "pyreadline3" -version = "3.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, -] - -[[package]] -name = "pyrsistent" -version = "0.20.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/3a/5031723c09068e9c8c2f0bc25c3a9245f2b1d1aea8396c787a408f2b95ca/pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4", size = 103642 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/ee/ff2ed52032ac1ce2e7ba19e79bd5b05d152ebfb77956cf08fcd6e8d760ea/pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", 
hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e", size = 83537 }, - { url = "https://files.pythonhosted.org/packages/80/f1/338d0050b24c3132bcfc79b68c3a5f54bce3d213ecef74d37e988b971d8a/pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e", size = 122615 }, - { url = "https://files.pythonhosted.org/packages/07/3a/e56d6431b713518094fae6ff833a04a6f49ad0fbe25fb7c0dc7408e19d20/pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3", size = 122335 }, - { url = "https://files.pythonhosted.org/packages/4a/bb/5f40a4d5e985a43b43f607250e766cdec28904682c3505eb0bd343a4b7db/pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d", size = 118510 }, - { url = "https://files.pythonhosted.org/packages/1c/13/e6a22f40f5800af116c02c28e29f15c06aa41cb2036f6a64ab124647f28b/pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174", size = 60865 }, - { url = "https://files.pythonhosted.org/packages/75/ef/2fa3b55023ec07c22682c957808f9a41836da4cd006b5f55ec76bf0fbfa6/pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d", size = 63239 }, - { url = "https://files.pythonhosted.org/packages/23/88/0acd180010aaed4987c85700b7cc17f9505f3edb4e5873e4dc67f613e338/pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b", size = 58106 }, -] - -[[package]] -name = "pysam" -version = "0.22.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/bc/e0a79d74137643940f5406121039d1272f29f55c5330e7b43484b2259da5/pysam-0.22.1.tar.gz", hash = "sha256:18a0b97be95bd71e584de698441c46651cdff378db1c9a4fb3f541e560253b22", size = 4643640 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/2a/de24972a7fde545aec7d3e59230f51b004e407d327087f4a90b7912cb679/pysam-0.22.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e72e129d245574801125029a5892c9e18d2956b13c4203ea585cbd64ccde9351", size = 8373896 }, - { url = "https://files.pythonhosted.org/packages/f6/2c/d226f2db96fda51f7ab70c5256691def40af4879b6b5ffadda2c676ac5e5/pysam-0.22.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f8f00bb1fb977fc33c87cf5fe9023eefc2ba3d43d30ab4875a1765827018c949", size = 8005195 }, - { url = "https://files.pythonhosted.org/packages/f5/6b/27106562f2477d38c0d8efa452839505877e335da12ace6bf993b7bee4dd/pysam-0.22.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c0e051fda433c1c7ff94532f60477bb83b97f4bb183567a0ae23f340e1c200b4", size = 24493675 }, - { url = "https://files.pythonhosted.org/packages/0c/fe/ce252dce8e5dd7ae06fd2036b5a146c1200598346ee70cbeb0a44740aa6b/pysam-0.22.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:860c7c78ddb1539b83d5476502ba14c8b4e8435810dc7a5b715196da3dfb86b6", size = 25164276 }, -] - -[[package]] -name = "pytabix" -version = "0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/84/6a/520ecf75c2ada77492cb4ed21fb22aed178e791df434ca083b59fffadddd/pytabix-0.1.tar.gz", hash = 
"sha256:0774f1687ebd41811fb07a0e50951b6be72d7cc7e22ed2b18972eaf7482eb7d1", size = 45811 } - -[[package]] -name = "pytest" -version = "8.2.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a6/58/e993ca5357553c966b9e73cb3475d9c935fe9488746e13ebdf9b80fae508/pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977", size = 1427980 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/e7/81ebdd666d3bff6670d27349b5053605d83d55548e6bd5711f3b0ae7dd23/pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343", size = 339873 }, -] - -[[package]] -name = "pytest-cov" -version = "5.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, -] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, -] - -[[package]] -name = "pytest-subprocess" -version = "1.5.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3d/02/0c8323e6013ea967f0461ef5653bd129b2d673d74f070c681c7a8663285c/pytest_subprocess-1.5.2.tar.gz", hash = "sha256:ad3ca8a35e798bf9c82d9f16d88700b30d98c5a28236117b86c5d6e581a8ed97", size = 40468 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/77/a80e8f9126b95ffd5ad4d04bd14005c68dcbf0d88f53b2b14893f6cc7232/pytest_subprocess-1.5.2-py3-none-any.whl", hash = "sha256:23ac7732aa8bd45f1757265b1316eb72a7f55b41fb21e2ca22e149ba3629fa46", size = 20886 }, -] - -[[package]] -name = "pytest-sugar" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, - { name = "pytest" }, - { name = "termcolor" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = 
"sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171 }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, -] - -[[package]] -name = "pywin32" -version = "308" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/7c/d00d6bdd96de4344e06c4afbf218bc86b54436a94c01c71a8701f613aa56/pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897", size = 5939729 }, - { url = "https://files.pythonhosted.org/packages/21/27/0c8811fbc3ca188f93b5354e7c286eb91f80a53afa4e11007ef661afa746/pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47", size = 6543015 }, - { url = "https://files.pythonhosted.org/packages/9d/0f/d40f8373608caed2255781a3ad9a51d03a594a1248cd632d6a298daca693/pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091", size = 7976033 }, - { url = "https://files.pythonhosted.org/packages/a9/a4/aa562d8935e3df5e49c161b427a3a2efad2ed4e9cf81c3de636f1fdddfd0/pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed", size = 5938579 }, - { url = "https://files.pythonhosted.org/packages/c7/50/b0efb8bb66210da67a53ab95fd7a98826a97ee21f1d22949863e6d588b22/pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4", size = 6542056 }, - { url = "https://files.pythonhosted.org/packages/26/df/2b63e3e4f2df0224f8aaf6d131f54fe4e8c96400eb9df563e2aae2e1a1f9/pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd", size = 7974986 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, -] - -[[package]] -name = "requests" -version = "2.32.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, -] - -[[package]] -name = "requests-file" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/97/bf44e6c6bd8ddbb99943baf7ba8b1a8485bcd2fe0e55e5708d7fee4ff1ae/requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658", size = 6891 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/25/dd878a121fcfdf38f52850f11c512e13ec87c2ea72385933818e5b6c15ce/requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c", size = 4244 }, -] - -[[package]] -name = "reretry" -version = "0.11.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/1d/25d562a62b7471616bccd7c15a7533062eb383927e68667bf331db990415/reretry-0.11.8.tar.gz", hash = "sha256:f2791fcebe512ea2f1d153a2874778523a8064860b591cd90afc21a8bed432e3", size = 4836 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/66/11/e295e07d4ae500144177f875a8de11daa4d86b8246ab41c76a98ce9280ca/reretry-0.11.8-py2.py3-none-any.whl", hash = "sha256:5ec1084cd9644271ee386d34cd5dd24bdb3e91d55961b076d1a31d585ad68a79", size = 5609 }, -] - -[[package]] -name = "rich" -version = "13.9.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, -] - -[[package]] -name = "ruamel-yaml" -version = "0.18.6" 
-source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ruamel-yaml-clib", marker = "python_full_version < '3.13' and platform_python_implementation == 'CPython'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/29/81/4dfc17eb6ebb1aac314a3eb863c1325b907863a1b8b1382cdffcb6ac0ed9/ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b", size = 143362 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/67/8ece580cc363331d9a53055130f86b096bf16e38156e33b1d3014fffda6b/ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636", size = 117761 }, -] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f", size = 225315 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632", size = 133433 }, - { url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d", size = 647362 }, - { url = "https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c", size = 754118 }, - { url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497 }, - { url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042 }, - { url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831 }, - { url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692 }, - { url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777 }, - { url = 
"https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523 }, - { url = "https://files.pythonhosted.org/packages/29/00/4864119668d71a5fa45678f380b5923ff410701565821925c69780356ffa/ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a", size = 132011 }, - { url = "https://files.pythonhosted.org/packages/7f/5e/212f473a93ae78c669ffa0cb051e3fee1139cb2d385d2ae1653d64281507/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475", size = 642488 }, - { url = "https://files.pythonhosted.org/packages/1f/8f/ecfbe2123ade605c49ef769788f79c38ddb1c8fa81e01f4dbf5cf1a44b16/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef", size = 745066 }, - { url = "https://files.pythonhosted.org/packages/e2/a9/28f60726d29dfc01b8decdb385de4ced2ced9faeb37a847bd5cf26836815/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6", size = 701785 }, - { url = "https://files.pythonhosted.org/packages/84/7e/8e7ec45920daa7f76046578e4f677a3215fe8f18ee30a9cb7627a19d9b4c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf", size = 693017 }, - { url = "https://files.pythonhosted.org/packages/c5/b3/d650eaade4ca225f02a648321e1ab835b9d361c60d51150bac49063b83fa/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1", size = 741270 }, - { url = "https://files.pythonhosted.org/packages/87/b8/01c29b924dcbbed75cc45b30c30d565d763b9c4d540545a0eeecffb8f09c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01", size = 709059 }, - { url = "https://files.pythonhosted.org/packages/30/8c/ed73f047a73638257aa9377ad356bea4d96125b305c34a28766f4445cc0f/ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6", size = 98583 }, - { url = "https://files.pythonhosted.org/packages/b0/85/e8e751d8791564dd333d5d9a4eab0a7a115f7e349595417fd50ecae3395c/ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3", size = 115190 }, -] - -[[package]] -name = "ruff" -version = "0.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/67/3e/e89f736f01aa9517a97e2e7e0ce8d34a4d8207087b3cfdec95133fee13b5/ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17", size = 3498844 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/05/c3a2e0feb3d5d394cdfd552de01df9d3ec8a3a3771bbff247fab7e668653/ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743", size = 10645241 }, - { url = 
"https://files.pythonhosted.org/packages/dd/da/59f0a40e5f88ee5c054ad175caaa2319fc96571e1d29ab4730728f2aad4f/ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f", size = 10391066 }, - { url = "https://files.pythonhosted.org/packages/b7/fe/85e1c1acf0ba04a3f2d54ae61073da030f7a5dc386194f96f3c6ca444a78/ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb", size = 10012308 }, - { url = "https://files.pythonhosted.org/packages/6f/9b/780aa5d4bdca8dcea4309264b8faa304bac30e1ce0bcc910422bfcadd203/ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca", size = 10881960 }, - { url = "https://files.pythonhosted.org/packages/12/f4/dac4361afbfe520afa7186439e8094e4884ae3b15c8fc75fb2e759c1f267/ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce", size = 10414803 }, - { url = "https://files.pythonhosted.org/packages/f0/a2/057a3cb7999513cb78d6cb33a7d1cc6401c82d7332583786e4dad9e38e44/ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969", size = 11464929 }, - { url = "https://files.pythonhosted.org/packages/eb/c6/1ccfcc209bee465ced4874dcfeaadc88aafcc1ea9c9f31ef66f063c187f0/ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd", size = 12170717 }, - { url = "https://files.pythonhosted.org/packages/84/97/4a524027518525c7cf6931e9fd3b2382be5e4b75b2b61bec02681a7685a5/ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a", size = 11708921 }, - { url = "https://files.pythonhosted.org/packages/a6/a4/4e77cf6065c700d5593b25fca6cf725b1ab6d70674904f876254d0112ed0/ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b", size = 13058074 }, - { url = "https://files.pythonhosted.org/packages/f9/d6/fcb78e0531e863d0a952c4c5600cc5cd317437f0e5f031cd2288b117bb37/ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831", size = 11281093 }, - { url = "https://files.pythonhosted.org/packages/e4/3b/7235bbeff00c95dc2d073cfdbf2b871b5bbf476754c5d277815d286b4328/ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab", size = 10882610 }, - { url = "https://files.pythonhosted.org/packages/2a/66/5599d23257c61cf038137f82999ca8f9d0080d9d5134440a461bef85b461/ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1", size = 10489273 }, - { url = "https://files.pythonhosted.org/packages/78/85/de4aa057e2532db0f9761e2c2c13834991e087787b93e4aeb5f1cb10d2df/ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366", size = 11003314 }, - { url = "https://files.pythonhosted.org/packages/00/42/afedcaa089116d81447347f76041ff46025849fedb0ed2b187d24cf70fca/ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f", size = 11342982 }, - { url = "https://files.pythonhosted.org/packages/39/c6/fe45f3eb27e3948b41a305d8b768e949bf6a39310e9df73f6c576d7f1d9f/ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72", size = 8819750 }, - { url = "https://files.pythonhosted.org/packages/38/8d/580db77c3b9d5c3d9479e55b0b832d279c30c8f00ab0190d4cd8fc67831c/ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19", size = 9701331 }, - { url = "https://files.pythonhosted.org/packages/b2/94/0498cdb7316ed67a1928300dd87d659c933479f44dec51b4f62bfd1f8028/ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7", size = 9145708 }, -] - -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, -] - -[[package]] -name = "simplejson" -version = "3.19.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3d/29/085111f19717f865eceaf0d4397bf3e76b08d60428b076b64e2a1903706d/simplejson-3.19.3.tar.gz", hash = "sha256:8e086896c36210ab6050f2f9f095a5f1e03c83fa0e7f296d6cba425411364680", size = 85237 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/15/513fea93fafbdd4993eacfcb762965b2ff3d29e618c029e2956174d68c4b/simplejson-3.19.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:66a0399e21c2112acacfebf3d832ebe2884f823b1c7e6d1363f2944f1db31a99", size = 92921 }, - { url = "https://files.pythonhosted.org/packages/a4/4f/998a907ae1a6c104dc0ee48aa248c2478490152808d34d8e07af57f396c3/simplejson-3.19.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6ef9383c5e05f445be60f1735c1816163c874c0b1ede8bb4390aff2ced34f333", size = 75311 }, - { url = "https://files.pythonhosted.org/packages/db/44/acd6122201e927451869d45952b9ab1d3025cdb5e61548d286d08fbccc08/simplejson-3.19.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:42e5acf80d4d971238d4df97811286a044d720693092b20a56d5e56b7dcc5d09", size = 74964 }, - { url = "https://files.pythonhosted.org/packages/27/ca/d0a1e8f16e1bbdc0b8c6d88166f45f565ed7285f53928cfef3b6ce78f14d/simplejson-3.19.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0b0efc7279d768db7c74d3d07f0b5c81280d16ae3fb14e9081dc903e8360771", size = 150106 }, - { url = "https://files.pythonhosted.org/packages/63/59/0554b78cf26c98e2b9cae3f44723bd72c2394e2afec1a14eedc6211f7187/simplejson-3.19.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0552eb06e7234da892e1d02365cd2b7b2b1f8233aa5aabdb2981587b7cc92ea0", size = 158347 }, - { url = "https://files.pythonhosted.org/packages/b2/fe/9f30890352e431e8508cc569912d3322147d3e7e4f321e48c0adfcb4c97d/simplejson-3.19.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5bf6a3b9a7d7191471b464fe38f684df10eb491ec9ea454003edb45a011ab187", size = 148456 }, - { url = "https://files.pythonhosted.org/packages/37/e3/663a09542ee021d4131162f7a164cb2e7f04ef48433a67591738afbf12ea/simplejson-3.19.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7017329ca8d4dca94ad5e59f496e5fc77630aecfc39df381ffc1d37fb6b25832", size = 152190 }, - { url = "https://files.pythonhosted.org/packages/31/20/4e0c4d35e10ff6465003bec304316d822a559a1c38c66ef6892ca199c207/simplejson-3.19.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67a20641afebf4cfbcff50061f07daad1eace6e7b31d7622b6fa2c40d43900ba", size = 149846 }, - { url = "https://files.pythonhosted.org/packages/08/7a/46e2e072cac3987cbb05946f25167f0ad2fe536748e7405953fd6661a486/simplejson-3.19.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd6a7dabcc4c32daf601bc45e01b79175dde4b52548becea4f9545b0a4428169", size = 151714 }, - { url = "https://files.pythonhosted.org/packages/7f/7d/dbeeac10eb61d5d8858d0bb51121a21050d281dc83af4c557f86da28746c/simplejson-3.19.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:08f9b443a94e72dd02c87098c96886d35790e79e46b24e67accafbf13b73d43b", size = 158777 }, - { url = "https://files.pythonhosted.org/packages/fc/8f/a98bdbb799c6a4a884b5823db31785a96ba895b4b0f4d8ac345d6fe98bbf/simplejson-3.19.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa97278ae6614346b5ca41a45a911f37a3261b57dbe4a00602048652c862c28b", size = 154230 }, - { url = "https://files.pythonhosted.org/packages/b1/db/852eebceb85f969ae40e06babed1a93d3bacb536f187d7a80ff5823a5979/simplejson-3.19.3-cp312-cp312-win32.whl", hash = "sha256:ef28c3b328d29b5e2756903aed888960bc5df39b4c2eab157ae212f70ed5bf74", size = 74002 }, - { url = "https://files.pythonhosted.org/packages/fe/68/9f0e5df0651cb79ef83cba1378765a00ee8038e6201cc82b8e7178a7778e/simplejson-3.19.3-cp312-cp312-win_amd64.whl", hash = "sha256:1e662336db50ad665777e6548b5076329a94a0c3d4a0472971c588b3ef27de3a", size = 75596 }, - { url = "https://files.pythonhosted.org/packages/93/3a/5896821ed543899fcb9c4256c7e71bb110048047349a00f42bc8b8fb379f/simplejson-3.19.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0959e6cb62e3994b5a40e31047ff97ef5c4138875fae31659bead691bed55896", size = 92931 }, - { url = "https://files.pythonhosted.org/packages/39/15/5d33d269440912ee40d856db0c8be2b91aba7a219690ab01f86cb0edd590/simplejson-3.19.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7a7bfad839c624e139a4863007233a3f194e7c51551081f9789cba52e4da5167", size = 75318 }, - { url = "https://files.pythonhosted.org/packages/2a/8d/2e7483a2bf7ec53acf7e012bafbda79d7b34f90471dda8e424544a59d484/simplejson-3.19.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afab2f7f2486a866ff04d6d905e9386ca6a231379181a3838abce1f32fbdcc37", size = 74971 }, - { url = "https://files.pythonhosted.org/packages/4d/9d/9bdf34437c8834a7cf7246f85e9d5122e30579f512c10a0c2560e994294f/simplejson-3.19.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00313681015ac498e1736b304446ee6d1c72c5b287cd196996dad84369998f7", size = 150112 }, - { url = "https://files.pythonhosted.org/packages/a7/e2/1f2ae2d89eaf85f6163c82150180aae5eaa18085cfaf892f8a57d4c51cbd/simplejson-3.19.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d936ae682d5b878af9d9eb4d8bb1fdd5e41275c8eb59ceddb0aeed857bb264a2", size = 158354 }, - { url = 
"https://files.pythonhosted.org/packages/60/83/26f610adf234c8492b3f30501e12f2271e67790f946c6898fe0c58aefe99/simplejson-3.19.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c6657485393f2e9b8177c77a7634f13ebe70d5e6de150aae1677d91516ce6b", size = 148455 }, - { url = "https://files.pythonhosted.org/packages/b5/4b/109af50006af77133653c55b5b91b4bd2d579ff8254ce11216c0b75f911b/simplejson-3.19.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a6a750d3c7461b1c47cfc6bba8d9e57a455e7c5f80057d2a82f738040dd1129", size = 152191 }, - { url = "https://files.pythonhosted.org/packages/75/dc/108872a8825cbd99ae6f4334e0490ff1580367baf12198bcaf988f6820ba/simplejson-3.19.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ea7a4a998c87c5674a27089e022110a1a08a7753f21af3baf09efe9915c23c3c", size = 149954 }, - { url = "https://files.pythonhosted.org/packages/eb/be/deec1d947a5d0472276ab4a4d1a9378dc5ee27f3dc9e54d4f62ffbad7a08/simplejson-3.19.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6300680d83a399be2b8f3b0ef7ef90b35d2a29fe6e9c21438097e0938bbc1564", size = 151812 }, - { url = "https://files.pythonhosted.org/packages/e9/58/4ee130702d36b1551ef66e7587eefe56651f3669255bf748cd71691e2434/simplejson-3.19.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ab69f811a660c362651ae395eba8ce84f84c944cea0df5718ea0ba9d1e4e7252", size = 158880 }, - { url = "https://files.pythonhosted.org/packages/0f/e1/59cc6a371b60f89e3498d9f4c8109f6b7359094d453f5fe80b2677b777b0/simplejson-3.19.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:256e09d0f94d9c3d177d9e95fd27a68c875a4baa2046633df387b86b652f5747", size = 154344 }, - { url = "https://files.pythonhosted.org/packages/79/45/1b36044670016f5cb25ebd92497427d2d1711ecb454d00f71eb9a00b77cc/simplejson-3.19.3-cp313-cp313-win32.whl", hash = "sha256:2c78293470313aefa9cfc5e3f75ca0635721fb016fb1121c1c5b0cb8cc74712a", size = 74002 }, - { url = "https://files.pythonhosted.org/packages/e2/58/b06226e6b0612f2b1fa13d5273551da259f894566b1eef32249ddfdcce44/simplejson-3.19.3-cp313-cp313-win_amd64.whl", hash = "sha256:3bbcdc438dc1683b35f7a8dc100960c721f922f9ede8127f63bed7dfded4c64c", size = 75599 }, - { url = "https://files.pythonhosted.org/packages/0d/e7/f9fafbd4f39793a20cc52e77bbd766f7384312526d402c382928dc7667f6/simplejson-3.19.3-py3-none-any.whl", hash = "sha256:49cc4c7b940d43bd12bf87ec63f28cbc4964fc4e12c031cc8cd01650f43eb94e", size = 57004 }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, -] - -[[package]] -name = "smart-open" -version = "7.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/d8/1481294b2d110b805c0f5d23ef34158b7d5d4283633c0d34c69ea89bb76b/smart_open-7.0.5.tar.gz", hash = "sha256:d3672003b1dbc85e2013e4983b88eb9a5ccfd389b0d4e5015f39a9ee5620ec18", size = 71693 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/06/bc/706838af28a542458bffe74a5d0772ca7f207b5495cd9fccfce61ef71f2a/smart_open-7.0.5-py3-none-any.whl", hash = "sha256:8523ed805c12dff3eaa50e9c903a6cb0ae78800626631c5fe7ea073439847b89", size = 61387 }, -] - -[[package]] -name = "smmap" -version = "5.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/04/b5bf6d21dc4041000ccba7eb17dd3055feb237e7ffc2c20d3fae3af62baa/smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62", size = 22291 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/a5/10f97f73544edcdef54409f1d839f6049a0d79df68adbc1ceb24d1aaca42/smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da", size = 24282 }, -] - -[[package]] -name = "snakefmt" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "black" }, - { name = "click" }, - { name = "toml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b4/61/0228586a10b76064431e1d0c965f030b4c7dfbea6d1cfcb4d3f0cb0e6726/snakefmt-0.10.2.tar.gz", hash = "sha256:4286a5903b66da7e52763c5e8184da4edc95113b758f4448528804fb54f9b75a", size = 28371 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/c1/e0b37324a4cc623db7a5f0c346dc48b04e6470d8aff9d6615354132acc39/snakefmt-0.10.2-py3-none-any.whl", hash = "sha256:fc38642414768c23ea5b7326bc1c3a4056d9a01cdedf3592f50da62ec7099909", size = 28193 }, -] - -[[package]] -name = "snakemake" -version = "7.32.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "appdirs" }, - { name = "configargparse" }, - { name = "connection-pool" }, - { name = "datrie" }, - { name = "docutils" }, - { name = "gitpython" }, - { name = "humanfriendly" }, - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "nbformat" }, - { name = "packaging" }, - { name = "psutil" }, - { name = "pulp" }, - { name = "pyyaml" }, - { name = "requests" }, - { name = "reretry" }, - { name = "smart-open" }, - { name = "stopit" }, - { name = "tabulate" }, - { name = "throttler" }, - { name = "toposort" }, - { name = "wrapt" }, - { name = "yte" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/94/884160dab89886cef7802df0a8c8217bfb2d795427dee01ad0e0dc15964a/snakemake-7.32.4.tar.gz", hash = "sha256:fdc3f15dd7b06fabb7da30d460e0a3b1fba08e4ea91f9c32c47a83705cdc7b6e", size = 371171 } - -[[package]] -name = "snappy-pipeline" -version = "0.3.0" -source = { editable = "." 
} -dependencies = [ - { name = "altamisa" }, - { name = "biomedsheets" }, - { name = "fasteners" }, - { name = "jinja2" }, - { name = "matplotlib" }, - { name = "pydantic" }, - { name = "pysam" }, - { name = "ruamel-yaml" }, - { name = "snakemake" }, - { name = "termcolor" }, - { name = "varfish-cli" }, - { name = "vcfpy" }, -] - -[package.optional-dependencies] -all = [ - { name = "coverage" }, - { name = "coveralls" }, - { name = "pre-commit" }, - { name = "pyfakefs" }, - { name = "pytabix" }, - { name = "pytest" }, - { name = "pytest-cov" }, - { name = "pytest-mock" }, - { name = "pytest-subprocess" }, - { name = "pytest-sugar" }, - { name = "ruff" }, - { name = "snakefmt" }, - { name = "sphinx" }, - { name = "sphinx-mdinclude" }, - { name = "sphinx-rtd-theme" }, -] -dev = [ - { name = "pre-commit" }, - { name = "ruff" }, - { name = "snakefmt" }, -] -docs = [ - { name = "sphinx" }, - { name = "sphinx-mdinclude" }, - { name = "sphinx-rtd-theme" }, -] -test = [ - { name = "coverage" }, - { name = "coveralls" }, - { name = "pyfakefs" }, - { name = "pytabix" }, - { name = "pytest" }, - { name = "pytest-cov" }, - { name = "pytest-mock" }, - { name = "pytest-subprocess" }, - { name = "pytest-sugar" }, -] - -[package.metadata] -requires-dist = [ - { name = "altamisa", git = "https://github.com/bihealth/altamisa.git?rev=817dc491ff819e4c80686082bf3e5f602f1ac14c#817dc491ff819e4c80686082bf3e5f602f1ac14c" }, - { name = "biomedsheets", git = "https://github.com/bihealth/biomedsheets.git?rev=4e0a8484850c39d1511036c3fe29ec0b4f9271f8#4e0a8484850c39d1511036c3fe29ec0b4f9271f8" }, - { name = "coverage", marker = "extra == 'test'", specifier = ">=7.5.3,<8" }, - { name = "coveralls", marker = "extra == 'test'", specifier = ">=4.0.1,<5" }, - { name = "fasteners", specifier = ">=0.17.3,<1" }, - { name = "jinja2", specifier = ">=3.1.4,<4" }, - { name = "matplotlib", specifier = ">=3.8.4" }, - { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.0.1,<5" }, - { name = "pydantic", specifier = ">=2.9.0,<3" }, - { name = "pyfakefs", marker = "extra == 'test'", specifier = ">=5.5.0,<6" }, - { name = "pysam", specifier = ">=0.22.1,<1" }, - { name = "pytabix", marker = "extra == 'test'", specifier = ">=0.1.0,<1" }, - { name = "pytest", marker = "extra == 'test'", specifier = ">=8.2.2,<9" }, - { name = "pytest-cov", marker = "extra == 'test'", specifier = ">=5.0.0,<6" }, - { name = "pytest-mock", marker = "extra == 'test'", specifier = ">=3.14.0,<4" }, - { name = "pytest-subprocess", marker = "extra == 'test'", specifier = ">=1.5.0,<2" }, - { name = "pytest-sugar", marker = "extra == 'test'", specifier = ">=1.0.0,<2" }, - { name = "ruamel-yaml", specifier = ">=0.18.6,<1" }, - { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.0,<1" }, - { name = "snakefmt", marker = "extra == 'dev'", specifier = ">=0.10.2,<1" }, - { name = "snakemake", specifier = ">=7.32.0,<8" }, - { name = "snappy-pipeline", extras = ["dev", "docs", "test"], marker = "extra == 'all'" }, - { name = "sphinx", marker = "extra == 'docs'", specifier = ">=7.3.7,<8" }, - { name = "sphinx-mdinclude", marker = "extra == 'docs'", specifier = ">=0.6.0,<1" }, - { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=2.0.0,<3" }, - { name = "termcolor", specifier = ">=1.1.0,<3" }, - { name = "varfish-cli", specifier = ">=0.6.3" }, - { name = "vcfpy", specifier = ">=0.13.8,<1" }, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, -] - -[[package]] -name = "sphinx" -version = "7.3.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alabaster" }, - { name = "babel" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "docutils" }, - { name = "imagesize" }, - { name = "jinja2" }, - { name = "packaging" }, - { name = "pygments" }, - { name = "requests" }, - { name = "snowballstemmer" }, - { name = "sphinxcontrib-applehelp" }, - { name = "sphinxcontrib-devhelp" }, - { name = "sphinxcontrib-htmlhelp" }, - { name = "sphinxcontrib-jsmath" }, - { name = "sphinxcontrib-qthelp" }, - { name = "sphinxcontrib-serializinghtml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b7/0a/b88033900b1582f5ed8f880263363daef968d1cd064175e32abfd9714410/sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc", size = 7094808 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/fa/130c32ed94cf270e3d0b9ded16fb7b2c8fea86fa7263c29a696a30c1dde7/sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3", size = 3335650 }, -] - -[[package]] -name = "sphinx-mdinclude" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "mistune" }, - { name = "pygments" }, - { name = "sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b6/a7/c9a7888bb2187fdb06955d71e75f6f266b7e179b356ac76138d160a5b7eb/sphinx_mdinclude-0.6.2.tar.gz", hash = "sha256:447462e82cb8be61404a2204227f920769eb923d2f57608e3325f3bb88286b4c", size = 65257 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/3d/6b41fe1637cd53c4b10d56e0e6f396546f837973dabf9c4b2a1de44620ac/sphinx_mdinclude-0.6.2-py3-none-any.whl", hash = "sha256:648e78edb067c0e4bffc22943278d49d54a0714494743592032fa3ad82a86984", size = 16911 }, -] - -[[package]] -name = "sphinx-rtd-theme" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "sphinx" }, - { name = "sphinxcontrib-jquery" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/33/2a35a9cdbfda9086bda11457bcc872173ab3565b16b6d7f6b3efaa6dc3d6/sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b", size = 2785005 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/46/00fda84467815c29951a9c91e3ae7503c409ddad04373e7cfc78daad4300/sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586", size = 2824721 }, -] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size 
= 20053 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, -] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, -] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, -] - -[[package]] -name = "sphinxcontrib-jquery" -version = "4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104 }, -] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, -] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = 
"sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, -] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, -] - -[[package]] -name = "stopit" -version = "1.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/58/e8bb0b0fb05baf07bbac1450c447d753da65f9701f551dca79823ce15d50/stopit-1.1.2.tar.gz", hash = "sha256:f7f39c583fd92027bd9d06127b259aee7a5b7945c1f1fa56263811e1e766996d", size = 18281 } - -[[package]] -name = "tabulate" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, -] - -[[package]] -name = "termcolor" -version = "2.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/72/88311445fd44c455c7d553e61f95412cf89054308a1aa2434ab835075fc5/termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f", size = 13057 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/be/df630c387a0a054815d60be6a97eb4e8f17385d5d6fe660e1c02750062b4/termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8", size = 7755 }, -] - -[[package]] -name = "throttler" -version = "1.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b4/22/638451122136d5280bc477c8075ea448b9ebdfbd319f0f120edaecea2038/throttler-1.2.2.tar.gz", hash = "sha256:d54db406d98e1b54d18a9ba2b31ab9f093ac64a0a59d730c1cf7bb1cdfc94a58", size = 7970 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/df/d4/36bf6010b184286000b2334622bfb3446a40c22c1d2a9776bff025cb0fe5/throttler-1.2.2-py3-none-any.whl", hash = "sha256:fc6ae612a2529e01110b32335af40375258b98e3b81232ec77cd07f51bf71392", size = 7609 }, -] - -[[package]] -name = "toml" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = 
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, -] - -[[package]] -name = "toposort" -version = "1.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/19/8e955d90985ecbd3b9adb2a759753a6840da2dff3c569d412b2c9217678b/toposort-1.10.tar.gz", hash = "sha256:bfbb479c53d0a696ea7402601f4e693c97b0367837c8898bc6471adfca37a6bd", size = 11132 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/17/57b444fd314d5e1593350b9a31d000e7411ba8e17ce12dc7ad54ca76b810/toposort-1.10-py3-none-any.whl", hash = "sha256:cbdbc0d0bee4d2695ab2ceec97fe0679e9c10eab4b2a87a9372b929e70563a87", size = 8500 }, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, -] - -[[package]] -name = "typeguard" -version = "4.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/62/c3/400917dd37d7b8c07e9723f3046818530423e1e759a56a22133362adab00/typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b", size = 74959 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/53/9465dedf2d69fe26008e7732cf6e0a385e387c240869e7d54eed49782a3c/typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21", size = 35635 }, -] - -[[package]] -name = "typer" -version = "0.12.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "shellingham" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c5/58/a79003b91ac2c6890fc5d90145c662fd5771c6f11447f116b63300436bc9/typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722", size = 98953 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/2b/886d13e742e514f704c33c4caa7df0f3b89e5a25ef8db02aa9ca3d9535d5/typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b", size = 47288 }, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { 
url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, -] - -[[package]] -name = "varfish-cli" -version = "0.6.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jsonschema" }, - { name = "logzero" }, - { name = "polyleven" }, - { name = "pydantic" }, - { name = "python-dateutil" }, - { name = "requests" }, - { name = "simplejson" }, - { name = "tabulate" }, - { name = "toml" }, - { name = "tqdm" }, - { name = "typeguard" }, - { name = "typer" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/11/aa1841bdacb92d3c8499901a04383c51f58be48a9b356c0dfc41ff637be5/varfish_cli-0.6.4.tar.gz", hash = "sha256:229fbfb2798f5703fc2023b24034e05a6af7356681de0c0fa902f68fe80f1490", size = 176354 } - -[[package]] -name = "vcfpy" -version = "0.13.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pysam" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/c1/314e8bac0b73b683ecbd23e565f3309173eef3e0f9b6d470ef60f5dfdfae/vcfpy-0.13.8.tar.gz", hash = "sha256:e7d00965105e7ca9567299f073ad60c6bbfc78d685d25ba33353988af9b33160", size = 993203 } - -[[package]] -name = "virtualenv" -version = "20.28.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "distlib" }, - { name = "filelock" }, - { name = "platformdirs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/75/53316a5a8050069228a2f6d11f32046cfa94fbb6cc3f08703f59b873de2e/virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa", size = 7650368 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/f9/0919cf6f1432a8c4baa62511f8f8da8225432d22e83e3476f5be1a1edc6e/virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0", size = 4276702 }, -] - -[[package]] -name = "wrapt" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/a1/fc03dca9b0432725c2e8cdbf91a349d2194cf03d8523c124faebe581de09/wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801", size = 55542 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/82/518605474beafff11f1a34759f6410ab429abff9f7881858a447e0d20712/wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569", size = 38904 
}, - { url = "https://files.pythonhosted.org/packages/80/6c/17c3b2fed28edfd96d8417c865ef0b4c955dc52c4e375d86f459f14340f1/wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504", size = 88622 }, - { url = "https://files.pythonhosted.org/packages/4a/11/60ecdf3b0fd3dca18978d89acb5d095a05f23299216e925fcd2717c81d93/wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451", size = 80920 }, - { url = "https://files.pythonhosted.org/packages/d2/50/dbef1a651578a3520d4534c1e434989e3620380c1ad97e309576b47f0ada/wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1", size = 89170 }, - { url = "https://files.pythonhosted.org/packages/44/a2/78c5956bf39955288c9e0dd62e807b308c3aa15a0f611fbff52aa8d6b5ea/wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106", size = 86748 }, - { url = "https://files.pythonhosted.org/packages/99/49/2ee413c78fc0bdfebe5bee590bf3becdc1fab0096a7a9c3b5c9666b2415f/wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada", size = 79734 }, - { url = "https://files.pythonhosted.org/packages/c0/8c/4221b7b270e36be90f0930fe15a4755a6ea24093f90b510166e9ed7861ea/wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4", size = 87552 }, - { url = "https://files.pythonhosted.org/packages/4c/6b/1aaccf3efe58eb95e10ce8e77c8909b7a6b0da93449a92c4e6d6d10b3a3d/wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635", size = 36647 }, - { url = "https://files.pythonhosted.org/packages/b3/4f/243f88ac49df005b9129194c6511b3642818b3e6271ddea47a15e2ee4934/wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7", size = 38830 }, - { url = "https://files.pythonhosted.org/packages/67/9c/38294e1bb92b055222d1b8b6591604ca4468b77b1250f59c15256437644f/wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181", size = 38904 }, - { url = "https://files.pythonhosted.org/packages/78/b6/76597fb362cbf8913a481d41b14b049a8813cd402a5d2f84e57957c813ae/wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393", size = 88608 }, - { url = "https://files.pythonhosted.org/packages/bc/69/b500884e45b3881926b5f69188dc542fb5880019d15c8a0df1ab1dfda1f7/wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4", size = 80879 }, - { url = "https://files.pythonhosted.org/packages/52/31/f4cc58afe29eab8a50ac5969963010c8b60987e719c478a5024bce39bc42/wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b", size = 89119 }, - { url = 
"https://files.pythonhosted.org/packages/aa/9c/05ab6bf75dbae7a9d34975fb6ee577e086c1c26cde3b6cf6051726d33c7c/wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721", size = 86778 }, - { url = "https://files.pythonhosted.org/packages/0e/6c/4b8d42e3db355603d35fe5c9db79c28f2472a6fd1ccf4dc25ae46739672a/wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90", size = 79793 }, - { url = "https://files.pythonhosted.org/packages/69/23/90e3a2ee210c0843b2c2a49b3b97ffcf9cad1387cb18cbeef9218631ed5a/wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a", size = 87606 }, - { url = "https://files.pythonhosted.org/packages/5f/06/3683126491ca787d8d71d8d340e775d40767c5efedb35039d987203393b7/wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045", size = 36651 }, - { url = "https://files.pythonhosted.org/packages/f1/bc/3bf6d2ca0d2c030d324ef9272bea0a8fdaff68f3d1fa7be7a61da88e51f7/wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838", size = 38835 }, - { url = "https://files.pythonhosted.org/packages/ce/b5/251165c232d87197a81cd362eeb5104d661a2dd3aa1f0b33e4bf61dda8b8/wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b", size = 40146 }, - { url = "https://files.pythonhosted.org/packages/89/33/1e1bdd3e866eeb73d8c4755db1ceb8a80d5bd51ee4648b3f2247adec4e67/wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379", size = 113444 }, - { url = "https://files.pythonhosted.org/packages/9f/7c/94f53b065a43f5dc1fbdd8b80fd8f41284315b543805c956619c0b8d92f0/wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d", size = 101246 }, - { url = "https://files.pythonhosted.org/packages/62/5d/640360baac6ea6018ed5e34e6e80e33cfbae2aefde24f117587cd5efd4b7/wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f", size = 109320 }, - { url = "https://files.pythonhosted.org/packages/e3/cf/6c7a00ae86a2e9482c91170aefe93f4ccda06c1ac86c4de637c69133da59/wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c", size = 110193 }, - { url = "https://files.pythonhosted.org/packages/cd/cc/aa718df0d20287e8f953ce0e2f70c0af0fba1d3c367db7ee8bdc46ea7003/wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b", size = 100460 }, - { url = "https://files.pythonhosted.org/packages/f7/16/9f3ac99fe1f6caaa789d67b4e3c562898b532c250769f5255fa8b8b93983/wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab", size = 106347 }, - { url = "https://files.pythonhosted.org/packages/64/85/c77a331b2c06af49a687f8b926fc2d111047a51e6f0b0a4baa01ff3a673a/wrapt-1.17.0-cp313-cp313t-win32.whl", hash = 
"sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf", size = 37971 }, - { url = "https://files.pythonhosted.org/packages/05/9b/b2469f8be9efed24283fd7b9eeb8e913e9bc0715cf919ea8645e428ab7af/wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a", size = 40755 }, - { url = "https://files.pythonhosted.org/packages/4b/d9/a8ba5e9507a9af1917285d118388c5eb7a81834873f45df213a6fe923774/wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371", size = 23592 }, -] - -[[package]] -name = "yte" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dpath" }, - { name = "plac" }, - { name = "pyyaml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/4b/3f89f96417e4e39c3f3e3f4a17d6233e81dc1e5cd5b5ed0a2498faedf690/yte-1.5.4.tar.gz", hash = "sha256:d2d77e53eafca74f58234fcd3fea28cc0a719e4f3784911511e35e86594bc880", size = 6352 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/64/97df1886abf11291e9a18b1672b2b79eb940499263c85339a1645d870600/yte-1.5.4-py3-none-any.whl", hash = "sha256:14ccfcb57d60b7652041b606129851423805140b22f52f5152f7c2692cd7b905", size = 7655 }, -]