Mirror of https://github.com/abetlen/llama-cpp-python.git (synced 2023-09-07 17:34:22 +03:00)
Update docs
README.md
@@ -1,5 +1,10 @@
# 🦙 Python Bindings for `llama.cpp`

[](https://pypi.org/project/llama-cpp-python/)
[](https://pypi.org/project/llama-cpp-python/)
[](https://pypi.org/project/llama-cpp-python/)
[](https://pypi.org/project/llama-cpp-python/)

Simple Python bindings for **@ggerganov's** [`llama.cpp`](https://github.com/ggerganov/llama.cpp) library.

This package provides:
@@ -8,6 +13,42 @@ This package provides:
- OpenAI-like API
- LangChain compatibility
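For the LangChain route, a minimal sketch (assuming LangChain's `LlamaCpp` wrapper, which delegates to this package; the model path below is illustrative):

```python
# Minimal sketch: LangChain's LlamaCpp wrapper (assumed available in the
# installed langchain release); the model path is illustrative only.
from langchain.llms import LlamaCpp

llm = LlamaCpp(model_path="./models/7B/ggml-model.bin")

# LangChain LLM objects are callable with a plain prompt string.
print(llm("Q: Name the planets in the solar system? A: "))
```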
## Installation

Install from PyPI:

```bash
pip install llama-cpp-python
```

## Usage

```python
>>> from llama_cpp import Llama
>>> llm = Llama(model_path="models/7B/...")
>>> output = llm("Q: Name the planets in the solar system? A: ", max_tokens=32, stop=["Q:", "\n"], echo=True)
>>> print(output)
{
  "id": "cmpl-xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
  "object": "text_completion",
  "created": 1679561337,
  "model": "models/7B/...",
  "choices": [
    {
      "text": "Q: Name the planets in the solar system? A: Mercury, Venus, Earth, Mars, Jupiter, Saturn, Uranus, Neptune and Pluto.",
      "index": 0,
      "logprobs": None,
      "finish_reason": "stop"
    }
  ],
  "usage": {
    "prompt_tokens": 14,
    "completion_tokens": 28,
    "total_tokens": 42
  }
}
```
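Because the response mirrors OpenAI's text-completion format, the generated text can be read from `choices`. A minimal sketch; the `stream=True` flag here is an assumption about the API rather than something shown above:

```python
from llama_cpp import Llama

llm = Llama(model_path="models/7B/...")

# The completion text sits in the first entry of "choices".
output = llm("Q: Name the planets in the solar system? A: ", max_tokens=32, stop=["Q:", "\n"])
print(output["choices"][0]["text"])

# Assumed streaming variant: stream=True yields partial OpenAI-style chunks
# instead of a single response dict.
for chunk in llm("Q: Name the planets in the solar system? A: ",
                 max_tokens=32, stop=["Q:", "\n"], stream=True):
    print(chunk["choices"][0]["text"], end="", flush=True)
```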
## API Reference
@@ -21,3 +62,7 @@ This package provides:
::: llama_cpp.llama_cpp
    options:
        show_if_no_docstring: true

## License

This project is licensed under the terms of the MIT license.
mkdocs.yml
@@ -9,3 +9,12 @@ plugins:
watch:
  - llama_cpp

markdown_extensions:
  - pymdownx.highlight:
      anchor_linenums: true
      line_spans: __span
      pygments_lang_class: true
  - pymdownx.inlinehilite
  - pymdownx.snippets
  - pymdownx.superfences