final version bumps
logan-markewich committed Feb 16, 2024
1 parent b571b39 commit d0c3704
Showing 7 changed files with 19 additions and 10 deletions.
2 changes: 1 addition & 1 deletion llama-index-core/pyproject.toml
@@ -42,7 +42,7 @@ name = "llama-index-core"
packages = [{include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.10.3"
version = "0.10.5"

[tool.poetry.dependencies]
SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"}
@@ -50,7 +50,7 @@ def __init__(
) -> None:
"""Init params."""
import torch
-from llama_index.finetuning.embeddings.adapter_utils import (
+from llama_index.embeddings.adapter import (
BaseAdapter,
LinearLayer,
)
@@ -108,7 +108,7 @@ def from_model_path(
**kwargs (Any): Additional kwargs (see __init__)
"""
-from llama_index.finetuning.embeddings.adapter_utils import LinearLayer
+from llama_index.embeddings.adapter import LinearLayer

model_cls = model_cls or LinearLayer
model = model_cls.load(model_path)
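
Both hunks in this file only change where the adapter building blocks are imported from; the surrounding logic, including the model_cls.load(model_path) call, is untouched. A minimal sketch of the relocated names in use, assuming llama-index-embeddings-adapter 0.1.2 is installed; the adapter path is a placeholder:

    from llama_index.embeddings.adapter import BaseAdapter, LinearLayer

    # Same call pattern as the hunk above ("model = model_cls.load(model_path)").
    # "my_adapter/" is a hypothetical path to a previously saved adapter.
    adapter = LinearLayer.load("my_adapter/")
    assert isinstance(adapter, BaseAdapter)  # LinearLayer is assumed to subclass BaseAdapter
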
@@ -6,7 +6,7 @@
import torch
import transformers
from llama_index.core.utils import print_text
-from llama_index.embeddings.adapter.utils import BaseAdapter
+from llama_index.embeddings.adapter import BaseAdapter
from sentence_transformers.util import cos_sim
from torch import Tensor, nn
from torch.optim import Optimizer
4 changes: 2 additions & 2 deletions llama-index-finetuning/pyproject.toml
@@ -24,15 +24,15 @@ description = "llama-index finetuning"
license = "MIT"
name = "llama-index-finetuning"
readme = "README.md"
version = "0.1.1"
version = "0.1.2"

[tool.poetry.dependencies]
python = ">=3.8.1,<3.12"
llama-index-core = "^0.10.1"
llama-index-llms-openai = "^0.1.1"
llama-index-llms-gradient = "^0.1.1"
llama-index-postprocessor-cohere-rerank = "^0.1.1"
-llama-index-embeddings-adapter = "^0.1.1"
+llama-index-embeddings-adapter = "^0.1.2"
sentence-transformers = "^2.3.0"

[tool.poetry.group.dev.dependencies]
@@ -1,3 +1,12 @@
-from llama_index.embeddings.adapter.base import AdapterEmbeddingModel
+from llama_index.embeddings.adapter.base import (
+    AdapterEmbeddingModel,
+    LinearAdapterEmbeddingModel,
+)
+from llama_index.embeddings.adapter.utils import BaseAdapter, LinearLayer

__all__ = ["AdapterEmbeddingModel"]
__all__ = [
"AdapterEmbeddingModel",
"LinearAdapterEmbeddingModel",
"BaseAdapter",
"LinearLayer",
]
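
The expanded __init__.py above is what makes the shorter llama_index.embeddings.adapter import path used by the finetuning files resolve. A quick sanity check of the new public surface, assuming llama-index-embeddings-adapter 0.1.2 is installed (none of this is part of the commit itself):

    import llama_index.embeddings.adapter as adapter_pkg

    # The four names listed in __all__ above should now be reachable at the package root.
    for name in ("AdapterEmbeddingModel", "LinearAdapterEmbeddingModel", "BaseAdapter", "LinearLayer"):
        assert hasattr(adapter_pkg, name), name
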
@@ -24,7 +24,7 @@ description = "llama-index embeddings adapter integration"
license = "MIT"
name = "llama-index-embeddings-adapter"
readme = "README.md"
version = "0.1.1"
version = "0.1.2"

[tool.poetry.dependencies]
python = ">=3.8.1,<3.12"
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -41,7 +41,7 @@ name = "llama-index"
packages = [{from = "_llama-index", include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.10.3"
version = "0.10.5"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
