From baf973fbdece866be844f43a690635450df1c31d Mon Sep 17 00:00:00 2001
From: Logan
Date: Mon, 19 Feb 2024 11:44:48 -0600
Subject: [PATCH] version bumps [v0.10.7] (#10981)

---
 llama-index-core/llama_index/core/__init__.py | 2 +-
 llama-index-core/pyproject.toml | 2 +-
 .../pyproject.toml | 2 +-
 .../github/repository/github_client.py | 34 +++++++++++--------
 .../readers/github/repository/utils.py | 11 ++++--
 .../llama-index-readers-github/pyproject.toml | 2 +-
 pyproject.toml | 2 +-
 7 files changed, 32 insertions(+), 23 deletions(-)

diff --git a/llama-index-core/llama_index/core/__init__.py b/llama-index-core/llama_index/core/__init__.py
index 572b66be294d6..5f1aa3fa6e880 100644
--- a/llama-index-core/llama_index/core/__init__.py
+++ b/llama-index-core/llama_index/core/__init__.py
@@ -1,6 +1,6 @@
 """Init file of LlamaIndex."""
 
-__version__ = "0.10.6"
+__version__ = "0.10.7"
 
 import logging
 from logging import NullHandler
diff --git a/llama-index-core/pyproject.toml b/llama-index-core/pyproject.toml
index 497fd7cb87e6d..f8bd5c49375ac 100644
--- a/llama-index-core/pyproject.toml
+++ b/llama-index-core/pyproject.toml
@@ -42,7 +42,7 @@ name = "llama-index-core"
 packages = [{include = "llama_index"}]
 readme = "README.md"
 repository = "https://github.com/run-llama/llama_index"
-version = "0.10.6.post1"
+version = "0.10.7"
 
 [tool.poetry.dependencies]
 SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"}
diff --git a/llama-index-integrations/embeddings/llama-index-embeddings-clip/pyproject.toml b/llama-index-integrations/embeddings/llama-index-embeddings-clip/pyproject.toml
index 9a1359112fd90..ecf023bef417c 100644
--- a/llama-index-integrations/embeddings/llama-index-embeddings-clip/pyproject.toml
+++ b/llama-index-integrations/embeddings/llama-index-embeddings-clip/pyproject.toml
@@ -24,7 +24,7 @@ description = "llama-index embeddings clip integration"
 license = "MIT"
 name = "llama-index-embeddings-clip"
 readme = "README.md"
-version = "0.1.2"
+version = "0.1.3"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<3.12"
diff --git a/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/github_client.py b/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/github_client.py
index e87e0fe17c9a0..e31e970d39ee9 100644
--- a/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/github_client.py
+++ b/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/github_client.py
@@ -176,7 +176,7 @@ async def get_blob(
         owner: str,
         repo: str,
         file_sha: str,
-    ) -> GitBlobResponseModel:
+    ) -> Optional[GitBlobResponseModel]:
         ...
 
     async def get_commit(
@@ -405,7 +405,7 @@ async def get_blob(
         repo: str,
         file_sha: str,
         timeout: Optional[int] = 5,
-    ) -> GitBlobResponseModel:
+    ) -> Optional[GitBlobResponseModel]:
         """
         Get information about a blob. (Github API endpoint: getBlob).
 
@@ -421,18 +421,22 @@ async def get_blob(
         Examples:
             >>> blob_info = client.get_blob("owner", "repo", "file_sha")
         """
-        return GitBlobResponseModel.from_json(
-            (
-                await self.request(
-                    "getBlob",
-                    "GET",
-                    owner=owner,
-                    repo=repo,
-                    file_sha=file_sha,
-                    timeout=timeout,
-                )
-            ).text
-        )
+        try:
+            return GitBlobResponseModel.from_json(
+                (
+                    await self.request(
+                        "getBlob",
+                        "GET",
+                        owner=owner,
+                        repo=repo,
+                        file_sha=file_sha,
+                        timeout=timeout,
+                    )
+                ).text
+            )
+        except KeyError:
+            print(f"Failed to get blob for {owner}/{repo}/{file_sha}")
+            return None
 
     async def get_commit(
         self,
@@ -487,6 +491,6 @@ async def main() -> None:
                 blob_response = await client.get_blob(
                     owner="ahmetkca", repo="CommitAI", file_sha=obj.sha
                 )
-                print(blob_response.content)
+                print(blob_response.content if blob_response else "None")
 
     asyncio.run(main())
diff --git a/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/utils.py b/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/utils.py
index c22bc19773e59..7e349e89c70c0 100644
--- a/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/utils.py
+++ b/llama-index-integrations/readers/llama-index-readers-github/llama_index/readers/github/repository/utils.py
@@ -8,7 +8,7 @@
 import os
 import time
 from abc import ABC, abstractmethod
-from typing import List, Tuple
+from typing import List, Optional, Tuple
 
 from llama_index.readers.github.repository.github_client import (
     GitBlobResponseModel,
@@ -146,7 +146,7 @@ async def _fill_buffer(self) -> None:
 
         if self._verbose:
             start_t = time.time()
-        results: List[GitBlobResponseModel] = await asyncio.gather(
+        results: List[Optional[GitBlobResponseModel]] = await asyncio.gather(
             *[
                 self._github_client.get_blob(self._owner, self._repo, blob.sha)
                 for blob, _ in self._blobs_and_paths[
@@ -154,6 +154,9 @@ async def _fill_buffer(self) -> None:
                 ]  # TODO: use batch_size instead of buffer_size for concurrent requests
             ]
         )
+
+        filtered_results = [result for result in results if result is not None]
+
         if self._verbose:
             end_t = time.time()
             blob_names_and_sizes = [
@@ -167,5 +170,7 @@ async def _fill_buffer(self) -> None:
 
         self._buffer = [
             (result, path)
-            for result, (_, path) in zip(results, self._blobs_and_paths[start:end])
+            for result, (_, path) in zip(
+                filtered_results, self._blobs_and_paths[start:end]
+            )
         ]
diff --git a/llama-index-integrations/readers/llama-index-readers-github/pyproject.toml b/llama-index-integrations/readers/llama-index-readers-github/pyproject.toml
index d9f7e7d6c094b..e1a9576806be4 100644
--- a/llama-index-integrations/readers/llama-index-readers-github/pyproject.toml
+++ b/llama-index-integrations/readers/llama-index-readers-github/pyproject.toml
@@ -26,7 +26,7 @@ license = "MIT"
 maintainers = ["ahmetkca", "moncho", "rwood-97"]
 name = "llama-index-readers-github"
 readme = "README.md"
-version = "0.1.4"
+version = "0.1.5"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<3.12"
diff --git a/pyproject.toml b/pyproject.toml
index 797c8c2139db6..3665bdb82efc4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,7 +44,7 @@ name = "llama-index"
 packages = [{from = "_llama-index", include = "llama_index"}]
 readme = "README.md"
 repository = "https://github.com/run-llama/llama_index"
-version = "0.10.6"
+version = "0.10.7"
 
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
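
Note on the behavior change: get_blob now returns Optional[GitBlobResponseModel] and catches the KeyError raised when the GitHub API response is missing the expected blob fields, so callers should guard against a None result instead of assuming a model is always returned. The following is a minimal usage sketch in the spirit of the main() example at the bottom of github_client.py; the owner/repo/file_sha values are placeholders, and it assumes a GitHub token is available to the client (for example via the GITHUB_TOKEN environment variable).

import asyncio

from llama_index.readers.github.repository.github_client import GithubClient


async def fetch_blob_text() -> None:
    # Assumes a token is picked up from the environment; otherwise pass github_token=...
    client = GithubClient()

    # Placeholder coordinates; replace with a real owner, repo, and blob SHA.
    blob = await client.get_blob(owner="owner", repo="repo", file_sha="file_sha")

    # As of this patch, get_blob returns None when the response lacked the expected
    # fields, so check before touching .content.
    if blob is None:
        print("blob could not be fetched")
    else:
        print(blob.content)


asyncio.run(fetch_blob_text())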