From d4150a2484e1d12df38e7423902c63649d63845f Mon Sep 17 00:00:00 2001 From: Avik Datta Date: Tue, 30 May 2023 14:08:48 +0100 Subject: [PATCH] added api for predemult report upload --- app/iframe_view.py | 59 +- app/metadata_api.py | 7 +- app/models.py | 17 +- app/pre_demultiplexing_data_api.py | 418 +- app/pre_demultiplexing_view.py | 150 +- app/templates/iframe.html | 2 +- config.py | 3 + docker-compose.yaml | 2 +- static/predemult/bclconvert_report_v0.03.html | 15844 ++++++++++++++++ tests/config_test.py | 1 + tests/conftest.py | 4 + tests/test_apis.py | 20 +- tests/test_pre_demultiplexing_view.py | 56 +- tests/test_predemult_api.py | 88 + 14 files changed, 16389 insertions(+), 282 deletions(-) create mode 100644 static/predemult/bclconvert_report_v0.03.html create mode 100644 tests/test_predemult_api.py diff --git a/app/iframe_view.py b/app/iframe_view.py index f3812a1..53f5755 100644 --- a/app/iframe_view.py +++ b/app/iframe_view.py @@ -6,17 +6,19 @@ from flask_appbuilder.baseviews import BaseView, expose from flask_appbuilder.security.decorators import protect, has_access from app import db -from .models import Project_analysis_info_file -from .models import Project_seqrun_info_file -from .models import Project_seqrun_info_data -from .models import Project_analysis_info_data -from .metadata_view import ProjectView +from .models import ( + Project_analysis_info_file, + Project_seqrun_info_file, + Project_seqrun_info_data, + Project_analysis_info_data, + PreDeMultiplexingData) + log = logging.getLogger(__name__) def get_path_for_project_seqrun_info_file(id): try: - (file_path, project_id) = \ + record = \ db.session.\ query( Project_seqrun_info_file.file_path, @@ -24,13 +26,17 @@ def get_path_for_project_seqrun_info_file(id): join(Project_seqrun_info_data, Project_seqrun_info_data.project_seqrun_info_data_id==Project_seqrun_info_file.project_seqrun_info_data_id).\ filter(Project_seqrun_info_file.project_seqrun_info_file_id==id).\ one_or_none() + if record is None: + log.warning(f"Missing data for id {id}") + return '', '' + (file_path, project_id) = record return file_path, project_id except Exception as e: log.error(e) def get_path_for_project_analysis_info_file(id): try: - (file_path, project_id) = \ + record = \ db.session.\ query( Project_analysis_info_file.file_path, @@ -38,11 +44,34 @@ def get_path_for_project_analysis_info_file(id): join(Project_analysis_info_data, Project_analysis_info_data.project_analysis_info_data_id==Project_analysis_info_file.project_analysis_info_data_id).\ filter(Project_analysis_info_file.project_analysis_info_file_id==id).\ one_or_none() + if record is None: + log.warning(f"Missing data for id {id}") + return '', '' + (file_path, project_id) = record return file_path, project_id except Exception as e: log.error(e) +def get_path_for_predemult_report(id): + try: + record = \ + db.session.\ + query(PreDeMultiplexingData.file_path).\ + filter(PreDeMultiplexingData.demult_id==id).\ + one_or_none() + if record is None: + log.warning( + f"Missing pre-demult data for id {id}") + return '' + (file_path,) = \ + record + return file_path + except Exception as e: + raise ValueError( + f"Failed to get report for predemult entry {id}, error: {e}") + + class IFrameView(BaseView): route_base = "/" @@ -57,7 +86,7 @@ def view_seqrun_report(self, id): # return self.render_template("iframe.html", url=file_path, project_url=project_url) with open(file_path, 'r') as fp: html_data = fp.read() - return self.render_template("iframe.html", html_data=html_data, 
project_url=project_url) + return self.render_template("iframe.html", html_data=html_data, url_link=project_url) @expose("/static/analysis/") @has_access @@ -70,4 +99,16 @@ def view_analysis_report(self, id): # return self.render_template("iframe.html", url=file_path, project_url=project_url) with open(file_path, 'r') as fp: html_data = fp.read() - return self.render_template("iframe.html", html_data=html_data, project_url=project_url) \ No newline at end of file + return self.render_template("iframe.html", html_data=html_data, url_link=project_url) + + @expose("/static/predemult/") + @has_access + @cache.cached(timeout=600) + def view_predemult_report(self, id): + file_path = \ + get_path_for_predemult_report(id=id) + url_link = \ + url_for('PreDeMultiplexingDataView.list') + with open(file_path, 'r') as fp: + html_data = fp.read() + return self.render_template("iframe.html", html_data=html_data, url_link=url_link) \ No newline at end of file diff --git a/app/metadata_api.py b/app/metadata_api.py index 8b0726b..6a82a6c 100644 --- a/app/metadata_api.py +++ b/app/metadata_api.py @@ -8,6 +8,8 @@ from . import app, db, celery from .metadata.metadata_util import cleanup_and_load_new_data_to_metadata_tables +log = logging.getLogger(__name__) + @celery.task(bind=True) def async_cleanup_and_load_new_data_to_metadata_tables( self, json_file: str) -> dict: @@ -15,7 +17,7 @@ def async_cleanup_and_load_new_data_to_metadata_tables( cleanup_and_load_new_data_to_metadata_tables(json_file) return {"message": "success"} except Exception as e: - logging.error( + log.error( "Failed to run celery job, error: {0}".\ format(e)) @@ -51,6 +53,7 @@ def submit_cleanup_job(self): apply_async(args=[json_file]) return self.response(200, message='successfully submitted metadata update job') except Exception as e: - logging.error(e) + log.error(e) + return self.response_500('failed to submit metadata update job') diff --git a/app/models.py b/app/models.py index 4a9ae43..7a07f71 100644 --- a/app/models.py +++ b/app/models.py @@ -138,25 +138,18 @@ def __repr__(self): class PreDeMultiplexingData(Model): __tablename__ = 'pre_demultiplexing_data' __table_args__ = ( - UniqueConstraint('run_name', 'samplesheet_tag'), + UniqueConstraint('run_name', 'samplesheet_tag', 'date_stamp'), { 'mysql_engine':'InnoDB', 'mysql_charset':'utf8' }) demult_id = Column(INTEGER(unsigned=True), primary_key=True, nullable=False) run_name = Column(String(50), nullable=False) - samplesheet_tag = Column(String(50), nullable=False) - flowcell_cluster_plot = Column(TEXT()) - project_summary_table = Column(TEXT()) - project_summary_plot = Column(TEXT()) - sample_table = Column(TEXT()) - sample_plot= Column(TEXT()) - undetermined_table = Column(TEXT()) - undetermined_plot = Column(TEXT()) - file_path = Column(String(500), nullable=True) - status = Column(Enum("ACTIVE", "WITHDRAWN", "UNKNOWN"), nullable=False, server_default='UNKNOWN') + samplesheet_tag = Column(String(200), nullable=False) + file_path = Column(String(500), nullable=False) + status = Column(Enum("ACTIVE", "WITHDRAWN", "UNKNOWN"), nullable=False, server_default='ACTIVE') date_stamp = Column(TIMESTAMP(), nullable=False, server_default=current_timestamp(), onupdate=datetime.datetime.now) def __repr__(self): return self.run_name def report(self): - return Markup('report') + return Markup('report') """ Admin home view diff --git a/app/pre_demultiplexing_data_api.py b/app/pre_demultiplexing_data_api.py index 1c2eed5..b10f9ab 100644 --- a/app/pre_demultiplexing_data_api.py +++ 
b/app/pre_demultiplexing_data_api.py @@ -1,182 +1,312 @@ -import json, logging +import os, json, logging, hashlib, shutil, gzip, tempfile +from datetime import datetime from flask_appbuilder import ModelRestApi -from flask import request +from flask import request, jsonify from flask_appbuilder.api import expose from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_appbuilder.security.decorators import protect -from . import db +from . import db, app, celery from .models import PreDeMultiplexingData """ Pre-demultiplexing data Api """ -def search_predemultiplexing_data(run_name, samplesheet_tag): - try: - result = \ - db.session.\ - query(PreDeMultiplexingData).\ - filter(PreDeMultiplexingData.run_name==run_name).\ - filter(PreDeMultiplexingData.samplesheet_tag==samplesheet_tag).\ - one_or_none() - return result - except Exception as e: - raise ValueError( - "Failed to search pre demultiplexing data, error: {0}".\ - format(e)) - +log = logging.getLogger(__name__) -def add_predemultiplexing_data(data): +def load_predemult_report( + run_name: str, + tag_name: str, + file_path: str, + base_path: str): try: - if isinstance(data, bytes): - data = json.loads(data.decode()) - if isinstance(data, str): - data = json.loads(data) - flowcell_cluster_plot = data.get("flowcell_cluster_plot") - if isinstance(flowcell_cluster_plot, dict): - flowcell_cluster_plot = json.dumps(flowcell_cluster_plot) - project_summary_table = data.get("project_summary_table") - if isinstance(project_summary_table, dict): - project_summary_table = json.dumps(project_summary_table) - project_summary_plot = data.get("project_summary_plot") - if isinstance(project_summary_plot, dict): - project_summary_plot = json.dumps(project_summary_plot) - sample_table = data.get("sample_table") - if isinstance(sample_table, dict): - sample_table = json.dumps(sample_table) - sample_plot = data.get("sample_plot") - if isinstance(sample_plot, dict): - sample_plot = json.dumps(sample_plot) - undetermined_table = data.get("undetermined_table") - if isinstance(undetermined_table, dict): - undetermined_table = json.dumps(undetermined_table) - undetermined_plot = data.get("undetermined_plot") - if isinstance(undetermined_plot, dict): - undetermined_plot = json.dumps(undetermined_plot) - predemult_data = \ - PreDeMultiplexingData( - run_name=data.get("run_name"), - samplesheet_tag=data.get("samplesheet_tag"), - flowcell_cluster_plot=flowcell_cluster_plot, - project_summary_table=project_summary_table, - project_summary_plot=project_summary_plot, - sample_table=sample_table, - sample_plot=sample_plot, - undetermined_table=undetermined_table, - undetermined_plot=undetermined_plot) + ## get date stamp + datestamp = datetime.now() + datetime_str = \ + datestamp.strftime("%Y%m%d_%H%M%S") + ## get file name + file_name = \ + os.path.basename(file_path) + ## calculate new disk path + hash_string = \ + f"{run_name}{tag_name}{file_name}{datetime_str}" + hash_md5 = \ + hashlib.\ + md5(hash_string.encode('utf-8')).\ + hexdigest() + ## create dir and copy report file + target_dir = \ + os.path.join( + base_path, + run_name, + hash_md5) + target_file_path = \ + os.path.join( + target_dir, + file_name) + os.makedirs( + target_dir, + exist_ok=True) + shutil.copyfile( + file_path, + target_file_path) + ## update db record try: - db.session.add(predemult_data) + predemult_entry = \ + PreDeMultiplexingData( + run_name=run_name, + samplesheet_tag=tag_name, + file_path=target_file_path, + date_stamp=datestamp + ) + db.session.add(predemult_entry) 
db.session.flush() db.session.commit() except: db.session.rollback() raise except Exception as e: - raise ValueError( - "Failed to add de-multiplex data, error: {0}".\ - format(e)) - -def edit_predemultiplexing_data(data): - try: - if isinstance(data, bytes): - data = json.loads(data.decode()) - if isinstance(data, str): - data = json.loads(data) - if "run_name" not in data: - raise ValueError("Missing run name") - if "samplesheet_tag" not in data: - raise ValueError("Missing sampleshheet tag") - flowcell_cluster_plot = data.get("flowcell_cluster_plot") - if flowcell_cluster_plot is not None and \ - isinstance(flowcell_cluster_plot, dict): - flowcell_cluster_plot = json.dumps(flowcell_cluster_plot) - data.update({"flowcell_cluster_plot": flowcell_cluster_plot}) - project_summary_table = data.get("project_summary_table") - if project_summary_table is not None and \ - isinstance(project_summary_table, dict): - project_summary_table = json.dumps(project_summary_table) - data.update({"project_summary_table": project_summary_table}) - project_summary_plot = data.get("project_summary_plot") - if project_summary_plot is not None and \ - isinstance(project_summary_plot, dict): - project_summary_plot = json.dumps(project_summary_plot) - data.update({"project_summary_plot": project_summary_plot}) - sample_table = data.get("sample_table") - if sample_table is not None and \ - isinstance(sample_table, dict): - sample_table = json.dumps(sample_table) - data.update({"sample_table": sample_table}) - sample_plot = data.get("sample_plot") - if sample_plot is not None and \ - isinstance(sample_plot, dict): - sample_plot = json.dumps(sample_plot) - data.update({"sample_plot": sample_plot}) - undetermined_table = data.get("undetermined_table") - if undetermined_table is not None and \ - isinstance(undetermined_table, dict): - undetermined_table = json.dumps(undetermined_table) - data.update({"undetermined_table": undetermined_table}) - undetermined_plot = data.get("undetermined_plot") - if undetermined_plot is not None and \ - isinstance(undetermined_plot, dict): - undetermined_plot = json.dumps(undetermined_plot) - data.update({"undetermined_plot": undetermined_plot}) - try: - db.session.\ - query(PreDeMultiplexingData).\ - filter(PreDeMultiplexingData.run_name==data.get("run_name")).\ - filter(PreDeMultiplexingData.samplesheet_tag==data.get("samplesheet_tag")).\ - update(data) - db.session.commit() - except: - db.session.rollback() - raise - except Exception as e: - raise ValueError( - "Failed to update de-multiplex data, error: {0}".\ - format(e)) + raise ValueError( + f"Failed to load pre-demult report to db, error: {e}") -def add_or_edit_predemultiplexing_data(data): +@celery.task(bind=True) +def async_load_predemult_report( + self, + run_name: str, + tag_name: str, + file_path: str, + base_path: str) -> dict: try: - if isinstance(data, bytes): - data = json.loads(data.decode()) - if isinstance(data, str): - data = json.loads(data) - if "run_name" not in data: - raise ValueError("Missing run name") - if "samplesheet_tag" not in data: - raise ValueError("Missing sampleshheet tag") - result = \ - search_predemultiplexing_data( - run_name=data.get("run_name"), - samplesheet_tag=data.get("samplesheet_tag")) - if result is None: - add_predemultiplexing_data(data=data) - else: - edit_predemultiplexing_data(data=data) + load_predemult_report( + run_name=run_name, + tag_name=tag_name, + file_path=file_path, + base_path=base_path) + return {"message": "success"} except Exception as e: - raise ValueError( - 
"Failed to add or update de-multiplex data, error: {0}".\ - format(e)) + log.error( + "Failed to run celery job, error: {0}".\ + format(e)) class PreDeMultiplexingDataApi(ModelRestApi): resource_name = "predemultiplexing_data" datamodel = SQLAInterface(PreDeMultiplexingData) - @expose('/add_or_edit_report', methods=['POST']) + @expose('/add_report', methods=['POST']) @protect() - def add_or_edit_demult_report(self): + def add_report(self): try: if not request.files: return self.response_400('No files') + json_data = request.form + run_name = json_data.get('run_name') + samplesheet_tag = json_data.get('samplesheet_tag') + if run_name is None or \ + samplesheet_tag is None: + return self.response_400('Missing run_name or samplesheet_tag') + ## get report file from request file_objs = request.files.getlist('file') file_obj = file_objs[0] + file_name = file_obj.filename file_obj.seek(0) - json_data = file_obj.read() - add_or_edit_predemultiplexing_data(data=json_data) - return self.response(200, message='successfully added or updated demult data') + file_data = file_obj.read() + ## report file can be gzipped + if file_name.endswith('.gz'): + file_data = gzip.decompress(file_data).decode('utf-8') + ## get report file and dump it to tmp dir + report_dir = \ + tempfile.mkdtemp( + dir=app.config['CELERY_WORK_DIR'], + prefix='report_',) + report_file = \ + os.path.join(report_dir, file_name) + with open(report_file, 'wb') as fp: + fp.write(file_data) + ## send job to celery worker + base_dir = \ + os.path.join( + app.config['REPORT_UPLOAD_PATH'], + 'predemult_reports') + _ = \ + async_load_predemult_report.\ + apply_async(args=[ + run_name, + samplesheet_tag, + report_file, + base_dir]) + return self.response(200, message=f'successfully submitted demult report loading job for {os.path.basename(report_file)}') except Exception as e: - logging.error(e) + log.error(e) + return self.response_500('failed to load file') + + +# def search_predemultiplexing_data(run_name, samplesheet_tag): +# try: +# result = \ +# db.session.\ +# query(PreDeMultiplexingData).\ +# filter(PreDeMultiplexingData.run_name==run_name).\ +# filter(PreDeMultiplexingData.samplesheet_tag==samplesheet_tag).\ +# one_or_none() +# return result +# except Exception as e: +# raise ValueError( +# "Failed to search pre demultiplexing data, error: {0}".\ +# format(e)) + + +# def add_predemultiplexing_data(data): +# try: +# if isinstance(data, bytes): +# data = json.loads(data.decode()) +# if isinstance(data, str): +# data = json.loads(data) +# flowcell_cluster_plot = data.get("flowcell_cluster_plot") +# if isinstance(flowcell_cluster_plot, dict): +# flowcell_cluster_plot = json.dumps(flowcell_cluster_plot) +# project_summary_table = data.get("project_summary_table") +# if isinstance(project_summary_table, dict): +# project_summary_table = json.dumps(project_summary_table) +# project_summary_plot = data.get("project_summary_plot") +# if isinstance(project_summary_plot, dict): +# project_summary_plot = json.dumps(project_summary_plot) +# sample_table = data.get("sample_table") +# if isinstance(sample_table, dict): +# sample_table = json.dumps(sample_table) +# sample_plot = data.get("sample_plot") +# if isinstance(sample_plot, dict): +# sample_plot = json.dumps(sample_plot) +# undetermined_table = data.get("undetermined_table") +# if isinstance(undetermined_table, dict): +# undetermined_table = json.dumps(undetermined_table) +# undetermined_plot = data.get("undetermined_plot") +# if isinstance(undetermined_plot, dict): +# undetermined_plot = 
json.dumps(undetermined_plot) +# predemult_data = \ +# PreDeMultiplexingData( +# run_name=data.get("run_name"), +# samplesheet_tag=data.get("samplesheet_tag"), +# flowcell_cluster_plot=flowcell_cluster_plot, +# project_summary_table=project_summary_table, +# project_summary_plot=project_summary_plot, +# sample_table=sample_table, +# sample_plot=sample_plot, +# undetermined_table=undetermined_table, +# undetermined_plot=undetermined_plot) +# try: +# db.session.add(predemult_data) +# db.session.flush() +# db.session.commit() +# except: +# db.session.rollback() +# raise +# except Exception as e: +# raise ValueError( +# "Failed to add de-multiplex data, error: {0}".\ +# format(e)) + +# def edit_predemultiplexing_data(data): +# try: +# if isinstance(data, bytes): +# data = json.loads(data.decode()) +# if isinstance(data, str): +# data = json.loads(data) +# if "run_name" not in data: +# raise ValueError("Missing run name") +# if "samplesheet_tag" not in data: +# raise ValueError("Missing sampleshheet tag") +# flowcell_cluster_plot = data.get("flowcell_cluster_plot") +# if flowcell_cluster_plot is not None and \ +# isinstance(flowcell_cluster_plot, dict): +# flowcell_cluster_plot = json.dumps(flowcell_cluster_plot) +# data.update({"flowcell_cluster_plot": flowcell_cluster_plot}) +# project_summary_table = data.get("project_summary_table") +# if project_summary_table is not None and \ +# isinstance(project_summary_table, dict): +# project_summary_table = json.dumps(project_summary_table) +# data.update({"project_summary_table": project_summary_table}) +# project_summary_plot = data.get("project_summary_plot") +# if project_summary_plot is not None and \ +# isinstance(project_summary_plot, dict): +# project_summary_plot = json.dumps(project_summary_plot) +# data.update({"project_summary_plot": project_summary_plot}) +# sample_table = data.get("sample_table") +# if sample_table is not None and \ +# isinstance(sample_table, dict): +# sample_table = json.dumps(sample_table) +# data.update({"sample_table": sample_table}) +# sample_plot = data.get("sample_plot") +# if sample_plot is not None and \ +# isinstance(sample_plot, dict): +# sample_plot = json.dumps(sample_plot) +# data.update({"sample_plot": sample_plot}) +# undetermined_table = data.get("undetermined_table") +# if undetermined_table is not None and \ +# isinstance(undetermined_table, dict): +# undetermined_table = json.dumps(undetermined_table) +# data.update({"undetermined_table": undetermined_table}) +# undetermined_plot = data.get("undetermined_plot") +# if undetermined_plot is not None and \ +# isinstance(undetermined_plot, dict): +# undetermined_plot = json.dumps(undetermined_plot) +# data.update({"undetermined_plot": undetermined_plot}) +# try: +# db.session.\ +# query(PreDeMultiplexingData).\ +# filter(PreDeMultiplexingData.run_name==data.get("run_name")).\ +# filter(PreDeMultiplexingData.samplesheet_tag==data.get("samplesheet_tag")).\ +# update(data) +# db.session.commit() +# except: +# db.session.rollback() +# raise +# except Exception as e: +# raise ValueError( +# "Failed to update de-multiplex data, error: {0}".\ +# format(e)) + + +# def add_or_edit_predemultiplexing_data(data): +# try: +# if isinstance(data, bytes): +# data = json.loads(data.decode()) +# if isinstance(data, str): +# data = json.loads(data) +# if "run_name" not in data: +# raise ValueError("Missing run name") +# if "samplesheet_tag" not in data: +# raise ValueError("Missing sampleshheet tag") +# result = \ +# search_predemultiplexing_data( +# 
run_name=data.get("run_name"), +# samplesheet_tag=data.get("samplesheet_tag")) +# if result is None: +# add_predemultiplexing_data(data=data) +# else: +# edit_predemultiplexing_data(data=data) +# except Exception as e: +# raise ValueError( +# "Failed to add or update de-multiplex data, error: {0}".\ +# format(e)) + + +# class PreDeMultiplexingDataApi(ModelRestApi): +# resource_name = "predemultiplexing_data" +# datamodel = SQLAInterface(PreDeMultiplexingData) + # @expose('/add_or_edit_report', methods=['POST']) + # @protect() + # def add_or_edit_demult_report(self): + # try: + # if not request.files: + # return self.response_400('No files') + # file_objs = request.files.getlist('file') + # file_obj = file_objs[0] + # file_obj.seek(0) + # json_data = file_obj.read() + # add_or_edit_predemultiplexing_data(data=json_data) + # return self.response(200, message='successfully added or updated demult data') + # except Exception as e: + # logging.error(e) diff --git a/app/pre_demultiplexing_view.py b/app/pre_demultiplexing_view.py index 7f8227c..1e66db2 100644 --- a/app/pre_demultiplexing_view.py +++ b/app/pre_demultiplexing_view.py @@ -20,87 +20,87 @@ class PreDeMultiplexingDataView(ModelView): 'run_name':'Sequencing run', 'samplesheet_tag':'Tag', 'date_stamp': 'Updated on', - 'report': 'De-multiplexing report'} + 'report': 'Report'} list_columns = [ 'run_name', 'samplesheet_tag', 'date_stamp', 'report'] - base_permissions = ['can_list', 'can_get_report'] + base_permissions = ['can_list']#, 'can_get_report'] base_order = ("date_stamp", "desc") - @expose('/predemult_report/') - @has_access - @cache.cached(timeout=600) - def get_report(self, id): - try: - (run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, project_summary_plot, - sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp) = \ - get_pre_demultiplexing_data(demult_id=id) - flowcell_labels = flowcell_cluster_plot.get('labels') - total_cluster_raw = flowcell_cluster_plot.get('total_cluster_raw') - total_cluster_pf = flowcell_cluster_plot.get('total_cluster_pf') - total_yield = flowcell_cluster_plot.get('total_yield') - lanes = list(sample_table.keys()) - return \ - self.render_template( - 'demultiplexing_report.html', - run_name=run_name, - date_stamp=date_stamp, - flowcell_labels=flowcell_labels, - total_cluster_raw=total_cluster_raw, - total_cluster_pf=total_cluster_pf, - total_yield=total_yield, - project_summary_table=project_summary_table, - project_summary_plot=project_summary_plot, - sample_table=sample_table, - sample_plot=sample_plot, - undetermined_table=undetermined_table, - undetermined_plot=undetermined_plot, - lanes=lanes) - except Exception as e: - log.error(e) + # @expose('/predemult_report/') + # @has_access + # @cache.cached(timeout=600) + # def get_report(self, id): + # try: + # (run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, project_summary_plot, + # sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp) = \ + # get_pre_demultiplexing_data(demult_id=id) + # flowcell_labels = flowcell_cluster_plot.get('labels') + # total_cluster_raw = flowcell_cluster_plot.get('total_cluster_raw') + # total_cluster_pf = flowcell_cluster_plot.get('total_cluster_pf') + # total_yield = flowcell_cluster_plot.get('total_yield') + # lanes = list(sample_table.keys()) + # return \ + # self.render_template( + # 'demultiplexing_report.html', + # run_name=run_name, + # date_stamp=date_stamp, + # flowcell_labels=flowcell_labels, + # 
total_cluster_raw=total_cluster_raw, + # total_cluster_pf=total_cluster_pf, + # total_yield=total_yield, + # project_summary_table=project_summary_table, + # project_summary_plot=project_summary_plot, + # sample_table=sample_table, + # sample_plot=sample_plot, + # undetermined_table=undetermined_table, + # undetermined_plot=undetermined_plot, + # lanes=lanes) + # except Exception as e: + # log.error(e) -def get_pre_demultiplexing_data(demult_id): - try: - result = \ - db.session.\ - query(PreDeMultiplexingData).\ - filter(PreDeMultiplexingData.demult_id==demult_id).\ - one_or_none() - run_name = '' - samplesheet_tag = '' - flowcell_cluster_plot = '' - project_summary_table = '' - project_summary_plot = '' - sample_table = '' - undetermined_table = '' - undetermined_plot = '' - if result is not None: - run_name = result.run_name - samplesheet_tag = result.samplesheet_tag - flowcell_cluster_plot = result.flowcell_cluster_plot - if isinstance(flowcell_cluster_plot, str): - flowcell_cluster_plot = json.loads(flowcell_cluster_plot) - project_summary_table = result.project_summary_table - project_summary_plot = result.project_summary_plot - if isinstance(project_summary_plot, str): - project_summary_plot = json.loads(project_summary_plot) - sample_table = result.sample_table - if isinstance(sample_table, str): - sample_table = json.loads(sample_table) - sample_plot = result.sample_plot - if isinstance(sample_plot, str): - sample_plot = json.loads(sample_plot) - undetermined_table = result.undetermined_table - if isinstance(undetermined_table, str): - undetermined_table = json.loads(undetermined_table) - undetermined_plot = result.undetermined_plot - if isinstance(undetermined_plot, str): - undetermined_plot = json.loads(undetermined_plot) - date_stamp = result.date_stamp - return run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, project_summary_plot,\ - sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp - except: - raise \ No newline at end of file +# def get_pre_demultiplexing_data(demult_id): +# try: +# result = \ +# db.session.\ +# query(PreDeMultiplexingData).\ +# filter(PreDeMultiplexingData.demult_id==demult_id).\ +# one_or_none() +# run_name = '' +# samplesheet_tag = '' +# flowcell_cluster_plot = '' +# project_summary_table = '' +# project_summary_plot = '' +# sample_table = '' +# undetermined_table = '' +# undetermined_plot = '' +# if result is not None: +# run_name = result.run_name +# samplesheet_tag = result.samplesheet_tag +# flowcell_cluster_plot = result.flowcell_cluster_plot +# if isinstance(flowcell_cluster_plot, str): +# flowcell_cluster_plot = json.loads(flowcell_cluster_plot) +# project_summary_table = result.project_summary_table +# project_summary_plot = result.project_summary_plot +# if isinstance(project_summary_plot, str): +# project_summary_plot = json.loads(project_summary_plot) +# sample_table = result.sample_table +# if isinstance(sample_table, str): +# sample_table = json.loads(sample_table) +# sample_plot = result.sample_plot +# if isinstance(sample_plot, str): +# sample_plot = json.loads(sample_plot) +# undetermined_table = result.undetermined_table +# if isinstance(undetermined_table, str): +# undetermined_table = json.loads(undetermined_table) +# undetermined_plot = result.undetermined_plot +# if isinstance(undetermined_plot, str): +# undetermined_plot = json.loads(undetermined_plot) +# date_stamp = result.date_stamp +# return run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, 
project_summary_plot,\ +# sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp +# except: +# raise \ No newline at end of file diff --git a/app/templates/iframe.html b/app/templates/iframe.html index 8429a5b..2a47e6b 100644 --- a/app/templates/iframe.html +++ b/app/templates/iframe.html @@ -1,6 +1,6 @@ {% extends "appbuilder/base.html" %} {% block content %} -

<a href="{{ project_url }}">Go Back to project home</a>
+<a href="{{ url_link }}">Go Back to previous page</a>

{% endblock %} \ No newline at end of file diff --git a/config.py b/config.py index c45e70d..7ab99bf 100644 --- a/config.py +++ b/config.py @@ -108,6 +108,9 @@ # Setup image size default is (300, 200, True) # IMG_SIZE = (300, 200, True) +## report upload folder +REPORT_UPLOAD_PATH = "/static/reports/" + # Theme configuration # these are located on static/appbuilder/css/themes # you can create your own and easily use them placing them on the same dir structure to override diff --git a/docker-compose.yaml b/docker-compose.yaml index 11e1840..7b20005 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -14,7 +14,7 @@ services: ports: - "3306:3306" volumes: - - /home/vmuser/mysqlappdb_migration:/var/lib/mysql:rw + - /home/vmuser/mysqlappdb3:/var/lib/mysql:rw container_name: portal_db networks: - portal_network diff --git a/static/predemult/bclconvert_report_v0.03.html b/static/predemult/bclconvert_report_v0.03.html new file mode 100644 index 0000000..d17837d --- /dev/null +++ b/static/predemult/bclconvert_report_v0.03.html @@ -0,0 +1,15844 @@ + + + + + +bclconvert_report_v0.03 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/config_test.py b/tests/config_test.py index 5fa7217..c7fd31f 100644 --- a/tests/config_test.py +++ b/tests/config_test.py @@ -7,6 +7,7 @@ WTF_CSRF_ENABLED = False AUTH_ROLE_ADMIN = "Admin" AUTH_USER_REGISTRATION_ROLE = "Admin" +REPORT_UPLOAD_PATH = "/tmp" SQLALCHEMY_DATABASE_URI = \ os.environ.get("SQLALCHEMY_DATABASE_URI", "sqlite:///" + '/tmp/app.db') AUTH_ROLES_MAPPING = { diff --git a/tests/conftest.py b/tests/conftest.py index f676a8c..cc46d45 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -192,6 +192,10 @@ def test_client(db): admin_role, app.appbuilder.sm.add_permission_view_menu( "can_submit_cleanup_job", "MetadataLoadApi")) + app.appbuilder.sm.add_permission_role( + admin_role, + app.appbuilder.sm.add_permission_view_menu( + "can_add_report", "PreDeMultiplexingDataApi")) user = \ app.appbuilder.sm.find_user(email="admin@fab.org") if user is None: diff --git a/tests/test_apis.py b/tests/test_apis.py index 0d53933..1a48da7 100644 --- a/tests/test_apis.py +++ b/tests/test_apis.py @@ -4,10 +4,10 @@ from app.interop_data_api import add_interop_data from app.interop_data_api import edit_interop_data from app.interop_data_api import add_or_edit_interop_data -from app.pre_demultiplexing_data_api import search_predemultiplexing_data -from app.pre_demultiplexing_data_api import add_predemultiplexing_data -from app.pre_demultiplexing_data_api import edit_predemultiplexing_data -from app.pre_demultiplexing_data_api import add_or_edit_predemultiplexing_data +# from app.pre_demultiplexing_data_api import search_predemultiplexing_data +# from app.pre_demultiplexing_data_api import add_predemultiplexing_data +# from app.pre_demultiplexing_data_api import edit_predemultiplexing_data +# from app.pre_demultiplexing_data_api import add_or_edit_predemultiplexing_data class TestApiCase(unittest.TestCase): def setUp(self): @@ -73,12 +73,12 @@ def test_add_or_edit_interop_data(self): search_interop_for_run(run_name=run_name) self.assertEqual(result.table_data, "AAAAA") - def test_search_predemultiplexing_data(self): - result = \ - search_predemultiplexing_data( - run_name="AAAA", - samplesheet_tag="BBBB") - self.assertTrue(result is None) + # def test_search_predemultiplexing_data(self): + # result = \ + # search_predemultiplexing_data( + # run_name="AAAA", + # samplesheet_tag="BBBB") + # self.assertTrue(result is 
None) # def test_add_predemultiplexing_data(self): # result = \ diff --git a/tests/test_pre_demultiplexing_view.py b/tests/test_pre_demultiplexing_view.py index 34c741d..eb63b26 100644 --- a/tests/test_pre_demultiplexing_view.py +++ b/tests/test_pre_demultiplexing_view.py @@ -1,32 +1,32 @@ -import unittest, json -from app import appbuilder, db -from app.pre_demultiplexing_data_api import search_predemultiplexing_data -from app.pre_demultiplexing_data_api import add_predemultiplexing_data -from app.pre_demultiplexing_view import get_pre_demultiplexing_data +# import unittest, json +# from app import appbuilder, db +# from app.pre_demultiplexing_data_api import search_predemultiplexing_data +# from app.pre_demultiplexing_data_api import add_predemultiplexing_data +# from app.pre_demultiplexing_view import get_pre_demultiplexing_data -class TestPreDemultView(unittest.TestCase): - def setUp(self): - db.create_all() - self.demult_file = "data/demultiplexing_example.json" +# class TestPreDemultView(unittest.TestCase): +# def setUp(self): +# db.create_all() +# self.demult_file = "data/demultiplexing_example.json" - def tearDown(self): - db.drop_all() +# def tearDown(self): +# db.drop_all() - def test_get_pre_demultiplexing_data(self): - with open(self.demult_file, 'r') as fp: - json_data = json.load(fp) - add_predemultiplexing_data(data=json_data) - result = \ - search_predemultiplexing_data( - run_name="AAAA", - samplesheet_tag="BBBB") - self.assertTrue(result is not None) - (run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, project_summary_plot,\ - sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp) = \ - get_pre_demultiplexing_data(demult_id=1) - self.assertEqual(run_name,"AAAA") - self.assertTrue("plot1" in flowcell_cluster_plot) - self.assertEqual(flowcell_cluster_plot.get("plot1"), "data1") +# def test_get_pre_demultiplexing_data(self): +# with open(self.demult_file, 'r') as fp: +# json_data = json.load(fp) +# add_predemultiplexing_data(data=json_data) +# result = \ +# search_predemultiplexing_data( +# run_name="AAAA", +# samplesheet_tag="BBBB") +# self.assertTrue(result is not None) +# (run_name, samplesheet_tag, flowcell_cluster_plot, project_summary_table, project_summary_plot,\ +# sample_table, sample_plot, undetermined_table, undetermined_plot, date_stamp) = \ +# get_pre_demultiplexing_data(demult_id=1) +# self.assertEqual(run_name,"AAAA") +# self.assertTrue("plot1" in flowcell_cluster_plot) +# self.assertEqual(flowcell_cluster_plot.get("plot1"), "data1") -if __name__ == '__main__': - unittest.main() \ No newline at end of file +# if __name__ == '__main__': +# unittest.main() \ No newline at end of file diff --git a/tests/test_predemult_api.py b/tests/test_predemult_api.py new file mode 100644 index 0000000..94c396e --- /dev/null +++ b/tests/test_predemult_api.py @@ -0,0 +1,88 @@ +import json +import os +import tempfile +from io import BytesIO +from app.models import PreDeMultiplexingData +from app.pre_demultiplexing_data_api import ( + load_predemult_report, + async_load_predemult_report, + PreDeMultiplexingDataApi) +from flask_appbuilder.const import ( + API_SECURITY_PASSWORD_KEY, + API_SECURITY_PROVIDER_KEY, + API_SECURITY_REFRESH_KEY, + API_SECURITY_USERNAME_KEY) + +def test_load_predemult_report(db, tmp_path): + temp_report_dir = \ + tempfile.mkdtemp(dir=tmp_path) + temp_base_dir = \ + tempfile.mkdtemp(dir=tmp_path) + # Create a dummy report + temp_report_path = os.path.join(temp_report_dir, 'report.html') + with 
open(temp_report_path, 'w') as fp: + fp.write('

This is a test report

') + load_predemult_report( + run_name='test1', + tag_name='test 1', + file_path=temp_report_path, + base_path=temp_base_dir) + # check if its loaded + record = db.session.query(PreDeMultiplexingData).filter_by(run_name='test1').first() + assert record is not None + assert record.run_name == 'test1' + assert record.samplesheet_tag == 'test 1' + assert os.path.basename(record.file_path) == 'report.html' + assert os.path.exists(record.file_path) + assert record.file_path != temp_report_path + assert temp_base_dir in record.file_path + +def test_async_load_predemult_report(db, tmp_path): + temp_report_dir = \ + tempfile.mkdtemp(dir=tmp_path) + temp_base_dir = \ + tempfile.mkdtemp(dir=tmp_path) + # Create a dummy report + temp_report_path = os.path.join(temp_report_dir, 'report.html') + with open(temp_report_path, 'w') as fp: + fp.write('

This is a test report

') + async_load_predemult_report( + run_name='test1', + tag_name='test 1', + file_path=temp_report_path, + base_path=temp_base_dir) + # check if its loaded + record = db.session.query(PreDeMultiplexingData).filter_by(run_name='test1').first() + assert record is not None + assert record.run_name == 'test1' + assert record.samplesheet_tag == 'test 1' + assert os.path.basename(record.file_path) == 'report.html' + assert os.path.exists(record.file_path) + assert record.file_path != temp_report_path + assert temp_base_dir in record.file_path + +def test_PreDeMultiplexingDataApi1(db, test_client, tmp_path): + res = \ + test_client.post( + "/api/v1/security/login", + json={ + API_SECURITY_USERNAME_KEY: "admin", + API_SECURITY_PASSWORD_KEY: "password", + API_SECURITY_PROVIDER_KEY: "db"}) + assert res.status_code == 200 + token = \ + json.loads(res.data.decode("utf-8")).\ + get("access_token") + # temp_base_dir = \ + # tempfile.mkdtemp(dir=tmp_path) + # app.config['REPORT_UPLOAD_PATH'] = temp_base_dir + report_file_data = \ + BytesIO(b'

This is a test report

') + res = \ + test_client.post( + '/api/v1/predemultiplexing_data/add_report', + data=dict(file=(report_file_data, 'report.html'),run_name="test1",samplesheet_tag="test 1"), + headers={"Authorization": f"Bearer {token}"}, + content_type='multipart/form-data') + assert res.status_code == 200 + assert json.loads(res.data.decode('utf-8')).get("message") == 'successfully submitted demult report loading job for report.html'
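
For reference, a minimal client-side sketch of how the new add_report endpoint could be driven from outside the test suite. This is not taken from the patch: the base URL, credentials, report path and the use of the requests library are illustrative assumptions, while the endpoint path, form fields and bearer-token auth mirror tests/test_predemult_api.py.

import requests

BASE_URL = "http://localhost:8080"  # assumption: portal reachable on a local dev host

# obtain a JWT from the Flask-AppBuilder security API used by the tests
login = requests.post(
    f"{BASE_URL}/api/v1/security/login",
    json={"username": "admin", "password": "password", "provider": "db"})
token = login.json()["access_token"]

# upload the report as multipart form data, mirroring the new test
with open("bclconvert_report_v0.03.html", "rb") as fp:
    res = requests.post(
        f"{BASE_URL}/api/v1/predemultiplexing_data/add_report",
        headers={"Authorization": f"Bearer {token}"},
        data={"run_name": "test1", "samplesheet_tag": "test 1"},
        files={"file": ("bclconvert_report_v0.03.html", fp, "text/html")})
print(res.status_code, res.json().get("message"))

Note that the 200 response only confirms that the loading job was handed to the Celery worker (async_load_predemult_report); the copy into REPORT_UPLOAD_PATH and the new PreDeMultiplexingData row are created asynchronously.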