
Commit

[version] bump version to 0.10.65 (run-llama#15305)
* changelog

* version bumps

* 0.10.65

* 0.10.65

* 0.10.65
nerdai authored Aug 12, 2024
1 parent 2579d3c commit 682fa27
Showing 15 changed files with 133 additions and 19 deletions.
59 changes: 59 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,64 @@
# ChangeLog

## [2024-08-12]

### `llama-index-core` [0.10.65]

- chore: bump nltk version (#15277)

### `llama-index-tools-box` [0.1.0]

- Box tools for AI Agents (#15236)
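
A rough usage sketch for the new tool package. The `BoxSearchToolSpec` name and its `box_client` keyword are assumptions about the released API; the agent wiring follows the standard tool-spec pattern and assumes an OpenAI key plus a Box developer token.

```python
from box_sdk_gen import BoxClient, BoxDeveloperTokenAuth
from llama_index.agent.openai import OpenAIAgent
from llama_index.tools.box import BoxSearchToolSpec  # assumed tool-spec name

# Authenticate against Box with a developer token (box-sdk-gen client).
box_client = BoxClient(auth=BoxDeveloperTokenAuth(token="YOUR_BOX_DEV_TOKEN"))

# Expose Box search as agent tools and hand them to an OpenAI-powered agent.
tool_spec = BoxSearchToolSpec(box_client=box_client)  # assumed constructor keyword
agent = OpenAIAgent.from_tools(tool_spec.to_tool_list(), verbose=True)

print(agent.chat("Find the latest invoice documents in my Box account"))
```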

### `llama-index-multi-modal-llms-gemini` [0.1.8]

- feat: add default_headers to Gemini multi-modal (#15296)
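
A short sketch of the new parameter; the keyword is assumed to mirror the Gemini LLM's `default_headers`, and the model name and header values are placeholders.

```python
from llama_index.multi_modal_llms.gemini import GeminiMultiModal

# default_headers are attached to every request sent to the Gemini API.
mm_llm = GeminiMultiModal(
    model_name="models/gemini-1.5-flash",            # placeholder model choice
    default_headers={"X-Client-Tag": "my-service"},  # new in 0.1.8 (assumed signature)
)
```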

### `llama-index-vector-stores-clickhouse` [0.2.0]

- chore: stop using ServiceContext from the clickhouse integration (#15300)
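
Code that previously passed a `ServiceContext` into the ClickHouse-backed index should now configure shared components on the global `Settings` object. A rough migration sketch; the connection details and the `clickhouse_client` keyword are assumptions, and a default embedding model/LLM (e.g. an OpenAI key) is assumed to be available.

```python
import clickhouse_connect
from llama_index.core import Document, Settings, StorageContext, VectorStoreIndex
from llama_index.vector_stores.clickhouse import ClickHouseVectorStore

# Settings replaces the per-call ServiceContext for shared configuration.
Settings.chunk_size = 512

client = clickhouse_connect.get_client(host="localhost", port=8123)  # assumed local server
vector_store = ClickHouseVectorStore(clickhouse_client=client)       # assumed keyword name
storage_context = StorageContext.from_defaults(vector_store=vector_store)

index = VectorStoreIndex.from_documents(
    [Document(text="ClickHouse indexing now works without ServiceContext.")],
    storage_context=storage_context,
)
print(index.as_query_engine().query("What changed in the ClickHouse integration?"))
```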

### `llama-index-experimental` [0.2.0]

- chore: remove ServiceContext usage from experimental package (#15301)
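
The experimental `PandasQueryEngine` illustrates the same shift: pass an LLM directly (or rely on `Settings.llm`) rather than a `ServiceContext`. A hedged sketch; the `llm` keyword and the model choice are assumptions, and the DataFrame is illustrative only.

```python
import pandas as pd

from llama_index.experimental.query_engine import PandasQueryEngine
from llama_index.llms.openai import OpenAI  # any LlamaIndex LLM should work here

df = pd.DataFrame({"city": ["Toronto", "Tokyo"], "population": [2_930_000, 13_960_000]})

# The LLM is supplied directly (or picked up from Settings.llm); no ServiceContext.
query_engine = PandasQueryEngine(df=df, llm=OpenAI(model="gpt-3.5-turbo"), verbose=True)
print(query_engine.query("Which city has the larger population?"))
```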

### `llama-index-extractors-marvin` [0.1.4]

- fix: MarvinMetadataExtractor functionality and add async support (#15247)
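
A hedged sketch of the async path; the pydantic schema and the `marvin_model` keyword are assumptions, `aextract` is the standard async extractor entry point, and a configured Marvin/OpenAI key is assumed.

```python
import asyncio
from typing import List

from pydantic import BaseModel, Field
from llama_index.core.schema import TextNode
from llama_index.extractors.marvin import MarvinMetadataExtractor


class SectionMetadata(BaseModel):
    """Fields Marvin is asked to pull out of each node."""

    summary: str = Field(description="One-sentence summary of the text")
    keywords: List[str] = Field(description="Key topics mentioned in the text")


async def main() -> None:
    extractor = MarvinMetadataExtractor(marvin_model=SectionMetadata)  # assumed keyword
    nodes = [TextNode(text="LlamaIndex 0.10.65 bumps nltk and adds Box agent tools.")]
    metadata = await extractor.aextract(nodes)  # async path added in 0.1.4
    print(metadata)


asyncio.run(main())
```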

### `llama-index-utils-workflow` [0.1.1]

- chore: bump black version (#15288)
- chore: bump nltk version (#15277)

### `llama-index-readers-microsoft-onedrive` [0.1.9]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-upstage` [0.1.3]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-nvidia` [0.1.5]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-litellm` [0.1.1]

- chore: bump nltk version (#15277)

### `llama-index-legacy` [0.9.48post1]

- chore: bump nltk version (#15277)

### `llama-index-packs-streamlit-chatbot` [0.1.5]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-huggingface` [0.2.3]

- feat: add multiprocessing for creating HuggingFace embeddings (#15260)
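
A sketch of the new multiprocessing path; the `parallel_process` and `target_devices` keywords are assumptions based on the PR description, and the model choice is illustrative.

```python
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5",
    parallel_process=True,          # assumed flag: spawn a sentence-transformers worker pool
    target_devices=["cpu", "cpu"],  # assumed: one worker process per listed device
)

texts = [f"document number {i}" for i in range(1_000)]
embeddings = embed_model.get_text_embedding_batch(texts, show_progress=True)
print(len(embeddings), len(embeddings[0]))
```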

## [2024-08-09]

### `llama-index-core` [0.10.64]
55 changes: 55 additions & 0 deletions docs/docs/CHANGELOG.md
@@ -1,5 +1,60 @@
# ChangeLog

## [2024-08-12]

### `llama-index-core` [0.10.65]

- chore: bump nltk version (#15277)

### `llama-index-multi-modal-llms-gemini` [0.1.8]

- feat: add default_headers to Gemini multi-modal (#15296)

### `llama-index-vector-stores-clickhouse` [0.2.0]

- chore: stop using ServiceContext from the clickhouse integration (#15300)

### `llama-index-experimental` [0.2.0]

- chore: remove ServiceContext usage from experimental package (#15301)

### `llama-index-extractors-marvin` [0.1.4]

- fix: MarvinMetadataExtractor functionality and add async support (#15247)

### `llama-index-utils-workflow` [0.1.1]

- chore: bump black version (#15288)
- chore: bump nltk version (#15277)

### `llama-index-readers-microsoft-onedrive` [0.1.9]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-upstage` [0.1.3]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-nvidia` [0.1.5]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-litellm` [0.1.1]

- chore: bump nltk version (#15277)

### `llama-index-legacy` [0.9.48post1]

- chore: bump nltk version (#15277)

### `llama-index-packs-streamlit-chatbot` [0.1.5]

- chore: bump nltk version (#15277)

### `llama-index-embeddings-huggingface` [0.2.3]

- feat: add multiprocessing for creating HuggingFace embeddings (#15260)

## [2024-08-09]

### `llama-index-core` [0.10.64]
2 changes: 1 addition & 1 deletion docs/mkdocs.yml
@@ -638,8 +638,8 @@ nav:
- ./examples/workflow/rag.ipynb
- ./examples/workflow/react_agent.ipynb
- ./examples/workflow/reflection.ipynb
- ./examples/workflow/workflows_cookbook.ipynb
- ./examples/workflow/sub_question_query_engine.ipynb
- ./examples/workflow/workflows_cookbook.ipynb
- Component Guides:
- ./module_guides/index.md
- Models:
2 changes: 1 addition & 1 deletion llama-index-core/llama_index/core/__init__.py
@@ -1,6 +1,6 @@
"""Init file of LlamaIndex."""

-__version__ = "0.10.64"
+__version__ = "0.10.65"

import logging
from logging import NullHandler
2 changes: 1 addition & 1 deletion llama-index-core/pyproject.toml
@@ -43,7 +43,7 @@ name = "llama-index-core"
packages = [{include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.10.64"
version = "0.10.65"

[tool.poetry.dependencies]
SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"}
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-embeddings-litellm"
readme = "README.md"
version = "0.1.0"
version = "0.1.1"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-embeddings-nvidia"
readme = "README.md"
version = "0.1.4"
version = "0.1.5"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
@@ -30,7 +30,7 @@ license = "MIT"
name = "llama-index-embeddings-upstage"
packages = [{include = "llama_index/"}]
readme = "README.md"
version = "0.1.2"
version = "0.1.3"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
@@ -27,7 +27,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-multi-modal-llms-gemini"
readme = "README.md"
version = "0.1.7"
version = "0.1.8"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
@@ -29,7 +29,7 @@ license = "MIT"
maintainers = ["godwin3737"]
name = "llama-index-readers-microsoft-onedrive"
readme = "README.md"
version = "0.1.8"
version = "0.1.9"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
2 changes: 1 addition & 1 deletion llama-index-legacy/pyproject.toml
@@ -42,7 +42,7 @@ name = "llama-index-legacy"
packages = [{include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.9.48"
version = "0.9.48post1"

[tool.poetry.dependencies]
SQLAlchemy = {extras = ["asyncio"], version = ">=1.4.49"}
@@ -29,7 +29,7 @@ license = "MIT"
maintainers = ["carolinedlu"]
name = "llama-index-packs-streamlit-chatbot"
readme = "README.md"
version = "0.1.4"
version = "0.1.5"

[tool.poetry.dependencies]
python = ">=3.8.1,<3.9.7 || >3.9.7,<4.0"
@@ -24,7 +24,7 @@ exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-utils-workflow"
readme = "README.md"
version = "0.1.0"
version = "0.1.1"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
12 changes: 6 additions & 6 deletions poetry.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions pyproject.toml
@@ -44,7 +44,7 @@ name = "llama-index"
packages = [{from = "_llama-index", include = "llama_index"}]
readme = "README.md"
repository = "https://github.com/run-llama/llama_index"
version = "0.10.64"
version = "0.10.65"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
@@ -57,7 +57,7 @@ llama-index-agent-openai = ">=0.1.4,<0.3.0"
llama-index-readers-file = "^0.1.4"
llama-index-readers-llama-parse = ">=0.1.2"
llama-index-indices-managed-llama-cloud = ">=0.2.0"
llama-index-core = "^0.10.64"
llama-index-core = "^0.10.65"
llama-index-multi-modal-llms-openai = "^0.1.3"
llama-index-cli = "^0.1.2"
