issue #51: Implement Ricky's review
issue #51: change mermaid theme
Maxence Guindon committed Apr 8, 2024
1 parent 58b4d44 commit 50b2e1b
Showing 5 changed files with 10 additions and 129 deletions.
.devcontainer/devcontainer.json (6 changes: 2 additions & 4 deletions)

```diff
@@ -5,9 +5,7 @@
     // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
     "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
     "features": {
-        "ghcr.io/devcontainers/features/azure-cli:1": {},
-        "ghcr.io/devcontainers/features/docker-in-docker:2": {},
-        "ghcr.io/devcontainers/features/github-cli:1": {}
+        "ghcr.io/devcontainers/features/azure-cli:1": {}
     },
 
     // Features to add to the dev container. More info: https://containers.dev/features.
@@ -27,7 +25,7 @@
                 "GitHub.vscode-pull-request-github",
                 "ms-python.python",
                 "ms-python.black-formatter",
-                "stkb.rewrap",
+                "stkb.rewrap"
             ]
         }
     }
```
app.py (14 changes: 7 additions & 7 deletions)

```diff
@@ -151,9 +151,9 @@ async def inference_request():
     The image and inference results are uploaded to a folder in the user's container.
     """
 
-    seconds = time.perf_counter() # transform into logging
+    seconds = time.perf_counter() # TODO: transform into logging
     try:
-        print(f"{date.today()} Entering inference request") # Transform into logging
+        print(f"{date.today()} Entering inference request") # TODO: Transform into logging
         data = await request.get_json()
         pipeline_name = data.get("model_name")
         folder_name = data["folder_name"]
@@ -190,16 +190,16 @@ async def inference_request():
         image_bytes = base64.b64encode(blob).decode("utf8")
 
         # Keep track of every output given by the models
-        # TO DO add it to CACHE variable
+        # TODO: add it to CACHE variable
         cache_json_result = [image_bytes]
 
         for idx, model in enumerate(pipelines_endpoints.get(pipeline_name)):
-            print(f"Entering {model.name.upper()} model") # Transform into logging
+            print(f"Entering {model.name.upper()} model") # TODO: Transform into logging
             result_json = await model.entry_function(model, cache_json_result[idx])
             cache_json_result.append(result_json)
 
-        print("End of inference request") # Transform into logging
-        print("Process results") # Transform into logging
+        print("End of inference request") # TODO: Transform into logging
+        print("Process results") # TODO: Transform into logging
 
         processed_result_json = await inference.process_inference_results(
             cache_json_result[-1], imageDims
@@ -216,7 +216,7 @@ async def inference_request():
             hash_value,
         )
         # return the inference results to the client
-        print(f"Took: {'{:10.4f}'.format(time.perf_counter() - seconds)} seconds")
+        print(f"Took: {'{:10.4f}'.format(time.perf_counter() - seconds)} seconds") # TODO: Transform into logging
         return jsonify(processed_result_json), 200
 
     except (KeyError, InferenceRequestError) as error:
```
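Each `# TODO: Transform into logging` marker introduced in this diff flags the same follow-up task: replacing the `print` calls with the standard `logging` module. A minimal sketch of what that conversion could look like, assuming a logger name, level, and format that are not part of this commit:

```python
import logging
import time

# Hypothetical logger setup; the name and format are illustrative only,
# not taken from this commit.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
logger = logging.getLogger("nachet.inference")

seconds = time.perf_counter()
# Instead of: print(f"{date.today()} Entering inference request")
# (the asctime field already carries the date)
logger.info("Entering inference request")

# ... inference work would happen here ...

# Instead of: print(f"Took: {'{:10.4f}'.format(time.perf_counter() - seconds)} seconds")
logger.info("Took: %10.4f seconds", time.perf_counter() - seconds)
```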
docs/nachet-inference-documentation.md (4 changes: 1 addition & 3 deletions)

````text
@@ -57,11 +57,9 @@
to a model and receive the result.

*Suggestion: we could call the pipeline a method if we don't want to mix terms.*

## Sequence Diagram for inference request 1.2.1

```mermaid
sequenceDiagram
title: Sequence Diagram for inference request 1.2.1
actor Client
participant Frontend
participant Backend
````
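The commit message mentions changing the mermaid theme, though no theme directive is visible in the hunk shown above. As a hedged sketch: mermaid themes are normally selected with an `%%{init: ...}%%` directive on the diagram's first line. The `neutral` theme and the single `request` message below are assumptions for illustration, not necessarily what this commit used:

```mermaid
%% 'neutral' is an example theme; 'default', 'dark', 'forest', and 'base' also exist.
%%{init: {'theme': 'neutral'}}%%
sequenceDiagram
    actor Client
    participant Frontend
    participant Backend
    Client ->> Frontend: request
```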
pipelines_version_insertion.py (113 changes: 0 additions & 113 deletions)

This file was deleted.
tests/test_health_request.py (2 changes: 0 additions & 2 deletions)

```diff
@@ -10,8 +10,6 @@ def test_health(self):
         response = asyncio.run(
             test.get('/health')
         )
-
-        print(response.status_code == 200)
         self.assertEqual(response.status_code, 200)
 
 if __name__ == '__main__':
```
