Skip to content

Commit

Permalink
readd gif lfs pointers for models
Browse files Browse the repository at this point in the history
  • Loading branch information
yrobla committed Dec 2, 2024
1 parent 56d4d93 commit f75ebec
Show file tree
Hide file tree
Showing 5 changed files with 30 additions and 22 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/image-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ on:
branches:
- main
schedule:
# Weekdays at noon GMT
- cron: '00 12 * * 1-5'
# Once weekly on Fridays at noon
- cron: '00 12 * * 5'
# Allow for manually triggering the workflow
workflow_dispatch:
jobs:
Expand Down
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,10 @@ source venv/bin/activate # On Windows: venv\Scripts\activate

# Install dev dependencies
pip install -e ".[dev]"

# Download models
git lfs install
git lfs pull
```

### Testing
Expand Down
3 changes: 3 additions & 0 deletions models/all-MiniLM-L6-v2-Q5_K_M.gguf
Git LFS file not shown
3 changes: 3 additions & 0 deletions models/qwen2.5-coder-1.5b-instruct-q5_k_m.gguf
Git LFS file not shown
38 changes: 18 additions & 20 deletions tests/test_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,23 +34,21 @@ def mock_inference_engine():

@pytest.mark.asyncio
async def test_search(mock_weaviate_client, mock_inference_engine):
    """Verify StorageEngine.search embeds a query and returns Weaviate hits.

    Patches ``LlamaCppInferenceEngine.embed`` (the method, not the whole
    class) so no real model is loaded, and mocks ``weaviate.WeaviateClient``
    so no database connection is made.
    """
    # Patch only the embed method — replacing the entire class object would
    # break any isinstance/identity use of LlamaCppInferenceEngine elsewhere.
    with patch(
        "codegate.inference.inference_engine.LlamaCppInferenceEngine.embed",
        mock_inference_engine.embed,
    ):
        # Mock the Weaviate client so no real database is contacted.
        with patch("weaviate.WeaviateClient", return_value=mock_weaviate_client):
            # Initialize StorageEngine against the (mocked) local data path.
            storage_engine = StorageEngine(data_path="./weaviate_data")

            # Invoke the search method under test.
            results = await storage_engine.search("test query", 5, 0.3)

            # Exactly one hit is expected, matching the mocked response.
            assert len(results) == 1
            assert results[0]["properties"]["name"] == "test"
            # The engine must open and close its client connection.
            mock_weaviate_client.connect.assert_called()
            mock_weaviate_client.close.assert_called()

0 comments on commit f75ebec

Please sign in to comment.