Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Bump version
CHANGELOG.md
@@ -7,11 +7,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

## [v0.1.59]

### Added

- (llama.cpp) k-quants support
- (server) mirostat sampling parameters to server

### Fixed

- Support both `.so` and `.dylib` for `libllama` on MacOS

## [v0.1.58]

### Added
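The mirostat sampling parameters added to the server in v0.1.59 are per-request settings. Below is a minimal sketch of passing them to the OpenAI-compatible completions endpoint; the host and port, the prompt, and the exact field names (`mirostat_mode`, `mirostat_tau`, `mirostat_eta`) are assumptions based on the underlying llama.cpp sampler options, so check the request schema of the release you actually run.

```python
# Hypothetical request against a locally running llama-cpp-python server
# (e.g. started with `python3 -m llama_cpp.server --model ./models/model.bin`).
# The mirostat_* fields mirror llama.cpp's Mirostat sampler options and are
# assumed to be accepted by the /v1/completions endpoint in this release.
import json
import urllib.request

payload = {
    "prompt": "Q: Name the planets in the solar system. A:",
    "max_tokens": 64,
    "mirostat_mode": 2,   # 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0
    "mirostat_tau": 5.0,  # target entropy for the controller
    "mirostat_eta": 0.1,  # learning rate for the controller
}

req = urllib.request.Request(
    "http://localhost:8000/v1/completions",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp)["choices"][0]["text"])
```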
pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "llama_cpp_python"
-version = "0.1.58"
+version = "0.1.59"
description = "Python bindings for the llama.cpp library"
authors = ["Andrei Betlen <abetlen@gmail.com>"]
license = "MIT"
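For completeness, the k-quants support pulled in from llama.cpp means k-quantized GGML model files should load through the existing high-level Python API once this release is installed. A minimal, illustrative sketch follows; the model path and quantization suffix are placeholders, not files shipped with the package.

```python
# Hypothetical local usage after `pip install llama-cpp-python==0.1.59`.
# The model file name (a k-quantized GGML file, e.g. *-q4_K_M.bin) is a
# placeholder; point model_path at a file you actually have on disk.
from llama_cpp import Llama

llm = Llama(model_path="./models/7B/ggml-model-q4_K_M.bin")

output = llm(
    "Q: Name the planets in the solar system. A:",
    max_tokens=64,
    stop=["Q:"],
    echo=True,  # include the prompt in the returned text
)
print(output["choices"][0]["text"])
```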