Skip to content

Commit

Permalink
Merge pull request #136 from mwang87/test_refactor
Browse files Browse the repository at this point in the history
refactoring integration test
  • Loading branch information
mwang87 authored Jan 2, 2020
2 parents 0813a76 + ffa503b commit e48684f
Show file tree
Hide file tree
Showing 6 changed files with 105 additions and 92 deletions.
1 change: 1 addition & 0 deletions code/docker-compose-production.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ services:
LETSENCRYPT_HOST: redu.ucsd.edu
#LETSENCRYPT_HOST: mingwangbeta.ucsd.edu
LETSENCRYPT_EMAIL: [email protected]
command: /app/run_production_server.sh

networks:
nginx-net:
Expand Down
2 changes: 1 addition & 1 deletion code/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ services:
- ../database/:/app/database
- ../temp/:/app/temp
- ../logs/:/app/logs
command: /app/run_production_server.sh
command: /app/run_server.sh

redu-ms2-populate:
build: .
Expand Down
53 changes: 28 additions & 25 deletions code/templates/metadataselection.html
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,6 @@
{% block content %}

<div id="app-4" class="container-fluid justify-content-center align-items-center">
<!-- <div class="row">
<div class="col-sm-1"></div>
<div class="col-sm text-center">
<h2>Metadata Analysis Selection Dashboard</h2>
</div>
<div class="col-sm-1"></div>
</div> -->

<br>

<div class="row">
Expand All @@ -28,7 +20,6 @@ <h2>File Selection Interface</h2>
<h3>Selection Summary</h3>
</div>

<!-- <analysis-gobutton></analysis-gobutton> -->
<analysis-display-selection></analysis-display-selection>
</div>

Expand Down Expand Up @@ -158,11 +149,11 @@ <h3>Re-analysis of Public Data in GNPS</h3>
},
methods: {
clickMe: function(event) {
//$("#loadMe").modal({
// backdrop: "static", //remove ability to close modal with click
// keyboard: false, //remove option to close with keyboard
// show: true //Display loader!
// });
$("#loadMe").modal({
backdrop: "static", //remove ability to close modal with click
keyboard: false, //remove option to close with keyboard
show: true //Display loader!
});

$.ajax({
url: "/attribute/" + this.$props["attributename"] + "/attributeterms",
Expand Down Expand Up @@ -481,16 +472,27 @@ <h3>Re-analysis of Public Data in GNPS</h3>
total_number_of_files = this.$parent.groupG1.length + this.$parent.groupG2.length + this.$parent.groupG3.length + this.$parent.groupG4.length + this.$parent.groupG5.length + this.$parent.groupG6.length
unique_list_files = extractFilenamesToList(this.$parent.groupG1.concat(this.$parent.groupG2, this.$parent.groupG3, this.$parent.groupG4, this.$parent.groupG5, this.$parent.groupG6));
unique_list_files = Array.from(new Set(unique_list_files));

$("#loadMe").modal({
backdrop: "static", //remove ability to close modal with click
keyboard: false, //remove option to close with keyboard
show: true //Display loader!
});

$.post( "/processcomparemultivariate", {files: JSON.stringify(unique_list_files)})
.done(function(data)
{
var win = window.open();
win.document.write(data);
win.document.close();
win.focus();
});

$.ajax( {
type: "POST",
url: "/processcomparemultivariate",
data: {
files: JSON.stringify(unique_list_files)
},
success: function(data){
$("#loadMe").modal("hide");
//document.write(data)
//TODO: Implement this
alert("Implement Me")
}
});

}

},
Expand All @@ -499,10 +501,11 @@ <h3>Re-analysis of Public Data in GNPS</h3>
<br> \
<button class="btn btn-primary btn-block" v-on:click="setuplibrarysearch">Set Up Co-Analysis with GNPS Library Search</button> \
<br> \
<button class = "btn btn-primary btn-block" v-on:click="setuppca">Launch PCA of Selected Files</button> \
<br> \
</div>'

//<button class = "btn btn-primary btn-block" v-on:click="setuppca">Launch PCA of Selected Files</button> \
//<br> \


})

Expand Down
28 changes: 18 additions & 10 deletions code/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ def resolve_ontology(attribute, term):
url = "https://www.ebi.ac.uk/ols/api/ontologies/doid/terms?iri=http://purl.obolibrary.org/obo/%s" % (term.replace(":", "_"))
try:
ontology_json = requests.get(url).json()
#print(json.dumps(ontology_json))
return ontology_json["_embedded"]["terms"][0]["label"]
except KeyboardInterrupt:
raise
Expand All @@ -49,13 +48,12 @@ def resolve_ontology(attribute, term):

if attribute == "ATTRIBUTE_DatasetAccession":
try:
url = "https://massive.ucsd.edu/ProteoSAFe/proxi/datasets?resultType=full&accession=%s" % (term)
url = f"https://massive.ucsd.edu/ProteoSAFe//proxi/v0.1/datasets?filter={term}&function=datasets"
dataset_information = requests.get(url).json()
return dataset_information[0]["title"]
return dataset_information["title"]
except:
raise
return term

#raise Exception(url)

return term

Expand Down Expand Up @@ -276,7 +274,7 @@ def viewattributeterms(attribute):
attribute_db = Attribute.select().where(Attribute.categoryname == attribute)
all_terms_db = AttributeTerm.select().join(FilenameAttributeConnection).where(FilenameAttributeConnection.attribute == attribute_db).group_by(AttributeTerm.term)

filters_list = json.loads(request.args['filters'])
filters_list = json.loads(request.values.get('filters', "[]"))

output_list = []

Expand Down Expand Up @@ -673,13 +671,24 @@ def analyzelibrarysearch():
import redu_pca
import config

#This displays global PCoA of public data as a web url
@app.route("/displayglobalmultivariate", methods = ["GET"])
def displayglobalmultivariate():
def displayglobalmultivariate():
if not os.path.isfile(config.PATH_TO_ORIGINAL_PCA):
print("Missing Global PCA Calculation, Calculating")
if not os.path.isfile(config.PATH_TO_GLOBAL_OCCURRENCES):
#Get the actual all identifictions file
import urllib.request as request
from contextlib import closing
import shutil

with closing(request.urlopen('ftp://massive.ucsd.edu/MSV000084206/other/ReDU_all_identifications.tsv')) as r:
with open(config.PATH_TO_GLOBAL_OCCURRENCES, 'wb') as f:
shutil.copyfileobj(r, f)

redu_pca.calculate_master_projection(config.PATH_TO_GLOBAL_OCCURRENCES)

print("Begin Getting Global PCA")
print("Begin Getting Global PCA")
df_temp = pd.read_csv(config.PATH_TO_ORIGINAL_PCA)
full_file_list = df_temp["Unnamed: 0"].tolist()
df_temp.drop("Unnamed: 0", axis = 1, inplace = True)
Expand All @@ -696,8 +705,7 @@ def displayglobalmultivariate():

return send_file("./tempuploads/global/index.html")

#from line_profiler import LineProfiler

###TODO: What does this do?
@app.route('/processcomparemultivariate', methods=['GET', 'POST'])
def processcomparemultivariate():
#determine if it's a recalculation of data
Expand Down
2 changes: 2 additions & 0 deletions test-production-integration/run_local_test.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Point the integration tests at a locally-running ReDU instance
# (test_redu_integration.py reads SERVER_URL from the environment).
export SERVER_URL="http://localhost:5005"
# Run the whole test suite with nose2 in verbose mode.
nose2 -v
111 changes: 55 additions & 56 deletions test-production-integration/test_redu_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,53 +3,28 @@
import requests
import json

# NOTE(review): legacy production base URL (trailing slash, no env override) —
# some tests below still build URLs from it; prefer SERVER_URL for new tests.
BASE_URL = "https://redu.ucsd.edu/"
# Server under test; override via the SERVER_URL environment variable
# (defaults to the production deployment).
SERVER_URL = os.environ.get("SERVER_URL", "https://redu.ucsd.edu")
# Task id consumed by test_your_pca's /processcomparemultivariate request.
SAMPLE_TASK_ID = "ffa003f6c4d844188f1f751d34c649b0"
# Compound name used by the file/compound enrichment tests.
TEST_COMPOUND = "2,5-Dimethoxyphenethylamine"

def test_heartbeat():
    """The /heartbeat endpoint should answer with a 2xx status.

    The stale duplicate assignment built "https://{BASE_URL}/..." — a
    malformed double-scheme URL, since BASE_URL already contains https://.
    """
    url = f"{SERVER_URL}/heartbeat"
    r = requests.get(url)
    # Raises requests.HTTPError on any 4xx/5xx response.
    r.raise_for_status()


def test_pca_library_search():
query_url = BASE_URL + "processcomparemultivariate?task={}".format("f39c94cb7afe4568950bf61cdb8fee0d")
r = requests.get(query_url)
r.raise_for_status()

return 0

def test_pca_metabolomics_snets():
query_url = BASE_URL + "processcomparemultivariate?task={}".format("1ad7bc366aef45ce81d2dfcca0a9a5e7")
r = requests.get(query_url)
r.raise_for_status()

return 0

def test_pca_feature_based():
query_url = BASE_URL + "processcomparemultivariate?task={}".format("bb49a839face44cbb5ec3e6f855e7285")
r = requests.get(query_url)
r.raise_for_status()

return 0


def test_data_dump():
    """Download the full data dump and assert it is at least ~17.7 MB.

    Replaces the old return-code convention (return 0/1) with a real
    assertion, and drops the dead code after ``return`` plus the stale
    BASE_URL duplicate assignment left over from the refactor.
    """
    query_url = f"{SERVER_URL}/dump"
    response = requests.get(query_url)
    data = response.content

    # NOTE(review): sys.getsizeof measures the bytes object (payload + small
    # header), not exactly the payload length; len(data) would be stricter.
    file_size = sys.getsizeof(data)
    assert file_size > 17762000

def test_attribute_filtration():
    """The attributeterms endpoint answers OK for an empty filter list.

    Drops the stale BASE_URL + percent-encoded URL construction that the
    refactor replaced with the f-string form below.
    """
    attribute = "ATTRIBUTE_DatasetAccession"
    # filters=[] is an empty JSON list — i.e. no filtering applied.
    query_url = f"{SERVER_URL}/attribute/{attribute}/attributeterms?filters=[]"
    response = requests.get(query_url)
    response.raise_for_status()

Expand All @@ -63,21 +38,30 @@ def test_attribute_filtration():
#return 0


def test_attribute_terms_fields():
    """Each record from /attributes exposes the expected JSON fields.

    The diff-merged text carried two def lines (old and new test name) and a
    dead return-code branch; this is the consolidated post-refactor version.
    """
    query_url = f"{SERVER_URL}/attributes"
    response = requests.get(query_url)
    data = response.json()
    key_value = list(data[0].keys())

    # NOTE(review): asserting exact key ORDER, not just presence — assumes the
    # server serializes fields in this order; confirm that is intentional.
    expected_keys = ["attributename", "attributedisplay", "countterms"]
    assert key_value == expected_keys

def test_attribute_list():
    """Every listed attribute's unfiltered attributeterms endpoint responds OK."""
    listing = requests.get(f"{SERVER_URL}/attributes")
    # Walk each attribute record and hit its terms endpoint with no filters.
    for entry in listing.json():
        name = entry["attributename"]
        terms_url = f"{SERVER_URL}/attribute/{name}/attributeterms?filters=[]"
        requests.get(terms_url).raise_for_status()


def test_file_enrichment():
query_url = BASE_URL + "compoundfilename"
query_url = f"{SERVER_URL}/compoundfilename"
params = {'compoundname' : TEST_COMPOUND}
response = requests.get(query_url, params = params)
data = json.loads(response.content)
Expand All @@ -90,38 +74,53 @@ def test_file_enrichment():
return 0

def test_compound_enrichment():
    """POST a compound name and check the enrichment record's JSON fields.

    Removes the stale BASE_URL line and the dead return-code branch that
    survived the diff merge; the assert is the single source of truth.
    """
    query_url = f"{SERVER_URL}/compoundenrichment"
    params = {'compoundname' : TEST_COMPOUND}
    # Form-encoded POST body (second positional arg of requests.post is `data`).
    response = requests.post(query_url, params )
    data = json.loads(response.content)
    key_value = list(data[0].keys())

    # NOTE(review): exact key order is asserted, same caveat as the
    # /attributes field test.
    expected_keys = ["attribute_name", "attribute_term", "totalfiles", "compoundfiles", "percentage"]
    assert expected_keys == key_value


def test_pca_library_search():
    """Smoke-test PCA recomputation against a GNPS library-search task."""
    task_url = f"{SERVER_URL}/processcomparemultivariate?task=f39c94cb7afe4568950bf61cdb8fee0d"
    reply = requests.get(task_url)
    reply.raise_for_status()

def test_pca_metabolomics_snets():
    """Smoke-test PCA recomputation against a molecular-networking task."""
    task_url = f"{SERVER_URL}/processcomparemultivariate?task=1ad7bc366aef45ce81d2dfcca0a9a5e7"
    reply = requests.get(task_url)
    reply.raise_for_status()

def test_pca_feature_based():
    """Smoke-test PCA recomputation against a feature-based task."""
    task_url = f"{SERVER_URL}/processcomparemultivariate?task=bb49a839face44cbb5ec3e6f855e7285"
    reply = requests.get(task_url)
    reply.raise_for_status()

def test_your_pca():
    """Run PCA for SAMPLE_TASK_ID and check the response payload size.

    The merged diff left a duplicated ``file_size`` computation, a stale
    BASE_URL line, and a dead return-code branch; all removed here.
    """
    params = {'task': SAMPLE_TASK_ID}
    query_url = f"{SERVER_URL}/processcomparemultivariate"
    response = requests.get(query_url, params = params)
    data = response.content

    # NOTE(review): threshold assumes the rendered PCA page is > 22 MB;
    # sys.getsizeof ≈ payload length for a bytes object.
    file_size = sys.getsizeof(data)
    assert file_size > 22000000


def test_global_pca():
    """Fetch the global PCoA display and check the response payload size.

    Drops the duplicated ``file_size`` line and the dead return-code branch
    left over from the diff merge.
    """
    query_url = f"{SERVER_URL}/displayglobalmultivariate"
    response = requests.get(query_url)
    data = response.content

    # NOTE(review): threshold assumes the global PCoA page is > ~22.8 MB.
    file_size = sys.getsizeof(data)
    assert file_size > 22760000


def testing_massive_api():
    """Verify the MassIVE ProteoSAFe PROXI datasets endpoint is up.

    Upstream dependency check: views.resolve_ontology builds the same style
    of URL for dataset-accession lookups.
    """
    url = "https://massive.ucsd.edu/ProteoSAFe//proxi/v0.1/datasets?filter=MSV000084741&function=datasets"
    r = requests.get(url)
    # Check the HTTP status BEFORE decoding, so a 4xx/5xx error page raises
    # HTTPError instead of a confusing JSONDecodeError. (Original called
    # r.json() first.)
    r.raise_for_status()
    r.json()

0 comments on commit e48684f

Please sign in to comment.