Skip to content

Commit

Permalink
Fixes tests #85
Browse files Browse the repository at this point in the history
  • Loading branch information
sylvanie85 committed May 31, 2024
1 parent 19f463e commit cd422e0
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 197 deletions.
114 changes: 8 additions & 106 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,6 @@ async def before_serving():
# Store the seeds names and ml structure in CACHE
CACHE["seeds"] = datastore.get_all_seeds_names()
seeds = datastore.get_all_seeds()

Check failure on line 171 in app.py

View workflow job for this annotation

GitHub Actions / lint-test / lint-test

Ruff (F841)

app.py:171:9: F841 Local variable `seeds` is assigned to but never used
print(jsonify(seeds))
CACHE["endpoints"] = await get_pipelines()

print(
Expand Down Expand Up @@ -398,6 +397,8 @@ async def inference_request():
picture_id = await datastore.get_picture_id(
cursor, user_id, image_hash_value, container_client
)
# Close connection
datastore.end_query(connection, cursor)

pipeline = pipelines_endpoints.get(pipeline_name)

Expand All @@ -423,8 +424,14 @@ async def inference_request():
result_json_string,
image_hash_value,
)

# Open db connection
connection = datastore.get_connection()
cursor = datastore.get_cursor(connection)

saved_result_json = await datastore.save_inference_result(cursor, user_id, processed_result_json[0], picture_id, pipeline_name, 1)

# Close connection
datastore.end_query(connection, cursor)

# return the inference results to the client
Expand All @@ -435,47 +442,6 @@ async def inference_request():
print(error)
return jsonify(["InferenceRequestError: " + error.args[0]]), 400

@app.get("/picture-form")
async def get_picture_form_info():
"""
Retrieves the names of seeds from the database and returns them as a JSON
response.
Returns:
A JSON response containing the names of seeds.
Raises:
APIErrors: If there is an error while retrieving the seeds names from
the database.
"""
try:
seeds_names = datastore.get_all_seeds_names()
return jsonify(seeds_names), 200
except datastore.DatastoreError as error:
return jsonify([error.args[0]]), 400

@app.put("/upload-pictures")
async def picture_batch_import():
"""
This function handles the batch import of pictures.
It performs the following steps:
1. Uploads and chunks the file.
2. Reconstructs the file and extracts data.
3. Validates and uploads the data.
Returns:
- If successful, returns a JSON response with the picture ID and a status code of 200.
- If an error occurs, returns a JSON response with the error message and a status code of 400.
"""
try:
temp_files = await upload_and_chunk_file(request)
email, picture_set, data = reconstruct_file_and_extract_data(temp_files)
picture_id = validate_and_upload_data(email, picture_set, data)
return jsonify([picture_id]), 200
except APIErrors as error:
return jsonify([error.args[0]]), 400

@app.get("/seed-data/<seed_name>")
async def get_seed_data(seed_name):
"""
Expand Down Expand Up @@ -517,7 +483,6 @@ async def get_seeds():
Returns JSON containing the model seeds metadata
"""
seeds = await datastore.get_all_seeds()
print(jsonify(seeds))
if seeds :
return jsonify(seeds), 200
else:
Expand Down Expand Up @@ -559,69 +524,6 @@ async def record_model(pipeline: namedtuple, result: list):
result[0]["models"] = new_entry
return json.dumps(result, indent=4)

async def upload_and_chunk_file(request):
    """
    Stream the request body to disk as chunk files in a temp directory.

    Args:
        request: The incoming request object; ``await request.stream()``
            must return an async iterable of ``bytes``.

    Returns:
        A list of file paths for the chunk files written under a fresh
        temporary directory.

    Note:
        The directory is created with ``tempfile.mkdtemp`` so that the
        returned paths remain valid after this function returns. The
        previous ``tempfile.TemporaryDirectory()`` object was a local:
        its finalizer deleted the directory as soon as the function
        returned, invalidating every path handed back to the caller.
        Callers are responsible for removing the directory when done.
    """
    temp_dir = tempfile.mkdtemp()

    upload_stream = await request.stream()
    # Follows the chunk_<index> naming scheme; the directory is empty at
    # this point, so the whole body stream lands in chunk_0.
    chunk_filename = os.path.join(temp_dir, f"chunk_{len(os.listdir(temp_dir))}")
    with open(chunk_filename, "wb") as chunk_file:
        async for chunk in upload_stream:
            chunk_file.write(chunk)

    return [os.path.join(temp_dir, f) for f in os.listdir(temp_dir)]

def reconstruct_file_and_extract_data(temp_files):
    """
    Reassemble a payload from its chunk files and parse it as JSON.

    Args:
        temp_files (list): Paths of the temporary chunk files, in order.

    Returns:
        tuple: ``(email, picture_set, data)`` where ``data`` is the full
        parsed payload and ``email`` / ``picture_set`` are its
        (possibly absent, then None) top-level keys.
    """
    pieces = []
    for chunk_path in temp_files:
        with open(chunk_path, "rb") as chunk_file:
            pieces.append(chunk_file.read())
    payload = json.loads(b"".join(pieces))
    return payload.get("email"), payload.get("picture_set"), payload

def validate_and_upload_data(email, picture_set, data):
    """
    Validate a batch-import payload and upload the picture set.

    Args:
        email (str): The user's email address; must not be None.
        picture_set (list): The pictures in the set; must be non-empty.
        data (dict): The full payload forwarded to the datastore upload.

    Returns:
        int: The ID of the uploaded picture set.

    Raises:
        EmailNotSendError: If the user email is not provided.
        EmptyPictureSetError: If no picture set is provided.
    """
    # Guard clauses: reject payloads missing a sender or any pictures.
    if email is None:
        raise EmailNotSendError("the user email is not provided")
    if not picture_set:
        raise EmptyPictureSetError("no picture set provided")
    uploader_id = datastore.validate_user(email)
    return datastore.upload_picture_set(user_id=uploader_id, **data)

async def fetch_json(repo_URL, key, file_path):
"""
Fetches JSON document from a GitHub repository.
Expand Down
4 changes: 0 additions & 4 deletions storage/datastore_storage_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,10 +117,6 @@ async def get_pipelines() -> list:
raise GetPipelinesError(error.args[0])

async def save_inference_result(cursor, user_id:str, inference_dict, picture_id:str, pipeline_id:str, type:int):
    """Log the detected box labels, then persist the inference result.

    Delegates the actual write to ``datastore.register_inference_result``
    and returns its result. ``inference_dict`` is expected to carry a
    ``totalBoxes`` count and an indexable ``boxes`` collection whose
    entries have a ``label`` key.
    """
    box_count = int(inference_dict["totalBoxes"])
    # Debug output: one label per detected box, then the known seeds.
    for box_idx in range(box_count):
        print(inference_dict["boxes"][box_idx]["label"])
    print(get_all_seeds())
    return await datastore.register_inference_result(cursor, user_id, inference_dict, picture_id, pipeline_id, type)

async def save_perfect_feedback(inference_id:str, user_id:str):
Expand Down
85 changes: 0 additions & 85 deletions tests/test_image_batch_import.py

This file was deleted.

8 changes: 6 additions & 2 deletions tests/test_inference_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def tearDown(self) -> None:
self.image_src = None
self.test = None

@patch("azure_storage.azure_storage_api.mount_container")
@patch("storage.azure_storage_api.mount_container")
def test_inference_request_successful(self, mock_container):
# Mock azure client services
mock_blob = Mock()
Expand Down Expand Up @@ -78,6 +78,7 @@ def test_inference_request_successful(self, mock_container):
"Access-Control-Allow-Origin": "*",
},
json={
"userId":"3e4d7d70-68d2-4302-a377-a869f1fd455e",
"image": self.image_header + self.image_src,
"imageDims": [720,540],
"folder_name": self.folder_name,
Expand All @@ -94,7 +95,7 @@ def test_inference_request_successful(self, mock_container):
print(expected_keys == responses)
self.assertEqual(responses, expected_keys)

@patch("azure_storage.azure_storage_api.mount_container")
@patch("storage.azure_storage_api.mount_container")
def test_inference_request_unsuccessfull(self, mock_container):
# Mock azure client services
mock_blob = Mock()
Expand Down Expand Up @@ -123,6 +124,7 @@ def test_inference_request_unsuccessfull(self, mock_container):
"Access-Control-Allow-Origin": "*",
},
json={
"userId":"3e4d7d70-68d2-4302-a377-a869f1fd455e",
"image": self.image_header,
"imageDims": [720,540],
"folder_name": self.folder_name,
Expand All @@ -140,6 +142,7 @@ def test_inference_request_missing_argument(self):
expected = ("InferenceRequestError: missing request arguments: either folder_name, container_name, imageDims or image is missing")

data = {
"userId":"3e4d7d70-68d2-4302-a377-a869f1fd455e",
"image": self.image_header,
"imageDims": [720,540],
"folder_name": self.folder_name,
Expand Down Expand Up @@ -189,6 +192,7 @@ def test_inference_request_wrong_pipeline_name(self):
"Access-Control-Allow-Origin": "*",
},
json={
"userId":"3e4d7d70-68d2-4302-a377-a869f1fd455e",
"image": self.image_src,
"imageDims": [720,540],
"folder_name": self.folder_name,
Expand Down

0 comments on commit cd422e0

Please sign in to comment.