Mirror of https://github.com/ivanfioravanti/chatbot-ollama.git, synced 2023-12-01 22:17:38 +03:00
Fix warnings and README
@@ -1,3 +1,3 @@
 # Chatbot Ollama
-DEFAULT_MODEL="llama2:latest"
+DEFAULT_MODEL="mistral:latest"
 NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT=""
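Context for this change (not part of the commit): the README table further down says the system-prompt default lives in `utils/app/const.ts`, so these env values are presumably resolved with fallbacks roughly like the sketch below. The constant names and the fallback prompt text are assumptions, not the repo's actual code.

```ts
// Sketch only: typical Next.js fallback pattern for these variables.
// Names and the fallback prompt are assumed, not copied from the repo.
export const DEFAULT_MODEL: string =
  process.env.DEFAULT_MODEL || 'mistral:latest';

export const DEFAULT_SYSTEM_PROMPT: string =
  process.env.NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT ||
  'You are a helpful assistant.'; // placeholder, not the repo's prompt

export const DEFAULT_TEMPERATURE: number = parseFloat(
  process.env.NEXT_PUBLIC_DEFAULT_TEMPERATURE || '1',
);
```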
@@ -46,12 +46,15 @@ git clone https://github.com/ivanfioravanti/chatbot-ollama.git
 ```bash
 npm i
 ```
 
 ### 3. Run Ollama server
 
+Either via the cli:
+
 ```bash
 ollama serve
 ```
 
+or via the [desktop client](https://ollama.ai/download)
+
 ### 4. Run App
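A quick way to confirm step 3 worked before running the app (a sketch, not from the repo): Ollama serves an HTTP API on http://localhost:11434 by default, and `GET /api/tags` lists the locally available models.

```ts
// Sketch: verify the Ollama server is reachable and see which models
// (e.g. mistral:latest) have been pulled locally.
async function checkOllama(): Promise<void> {
  const res = await fetch('http://localhost:11434/api/tags');
  if (!res.ok) {
    throw new Error(`Ollama server not reachable: ${res.status}`);
  }
  const { models } = (await res.json()) as { models: { name: string }[] };
  console.log('Available models:', models.map((m) => m.name).join(', '));
}

checkOllama().catch(console.error);
```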
@@ -70,7 +73,7 @@ When deploying the application, the following environment variables can be set:
 
 | Environment Variable              | Default value                  | Description                                            |
 | --------------------------------- | ------------------------------ | ------------------------------------------------------ |
-| DEFAULT_MODEL                     | `llama2:latest`                | The default model to use on new conversations          |
+| DEFAULT_MODEL                     | `mistral:latest`               | The default model to use on new conversations          |
 | NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations  |
 | NEXT_PUBLIC_DEFAULT_TEMPERATURE   | 1                              | The default temperature to use on new conversations    |
@@ -233,6 +233,7 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
       conversations,
       selectedConversation,
       stopConversationRef,
+      homeDispatch
     ],
   );
 
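This hunk and the ones that follow are the warning fixes named in the commit message: every value referenced inside a `useEffect`/`useCallback` body gets listed in its dependency array, which is what the react-hooks/exhaustive-deps lint rule demands. A minimal self-contained sketch of the pattern, with all names invented here:

```tsx
// Sketch of the exhaustive-deps fix, not the repo's code: a callback that
// uses a reducer dispatch must list it in its dependency array.
import { useCallback, useReducer } from 'react';

type State = { loading: boolean };
type Action = { field: 'loading'; value: boolean };

const reducer = (state: State, action: Action): State => ({
  ...state,
  [action.field]: action.value,
});

export const Example = () => {
  const [state, dispatch] = useReducer(reducer, { loading: false });

  const handleSend = useCallback(() => {
    dispatch({ field: 'loading', value: true });
    // ...perform the request, then dispatch { field: 'loading', value: false }
  }, [dispatch]); // including dispatch satisfies the lint rule

  return <button onClick={handleSend}>{state.loading ? '…' : 'Send'}</button>;
};
```

A reducer dispatch is referentially stable across renders, so adding it to the array changes no runtime behavior; it only silences the warning.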
@@ -219,7 +219,7 @@ export const ChatInput = ({
         textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
       }`;
     }
-  }, [content]);
+  }, [content, textareaRef]);
 
   useEffect(() => {
     const handleOutsideClick = (e: MouseEvent) => {
@@ -155,7 +155,7 @@ export const Chatbar = () => {
         value: conversations,
       });
     }
-  }, [searchTerm, conversations]);
+  }, [searchTerm, conversations, chatDispatch]);
 
   return (
     <ChatbarContext.Provider
@@ -114,7 +114,7 @@ const Promptbar = () => {
     } else {
       promptDispatch({ field: 'filteredPrompts', value: prompts });
     }
-  }, [searchTerm, prompts]);
+  }, [searchTerm, prompts, promptDispatch]);
 
   return (
     <PromptbarContext.Provider
@@ -209,12 +209,12 @@ const Home = ({ defaultModelId }: Props) => {
     if (window.innerWidth < 640) {
       dispatch({ field: 'showChatbar', value: false });
     }
-  }, [selectedConversation]);
+  }, [selectedConversation, dispatch]);
 
   useEffect(() => {
     defaultModelId &&
       dispatch({ field: 'defaultModelId', value: defaultModelId });
-  }, [defaultModelId]);
+  }, [defaultModelId, dispatch]);
 
   useEffect(() => {
     const settings = getSettings();
@@ -288,7 +288,7 @@ const Home = ({ defaultModelId }: Props) => {
         },
       });
     }
-  }, [defaultModelId, dispatch]);
+  }, [defaultModelId, dispatch, conversations, t]);
 
   return (
     <HomeContext.Provider
@@ -21,7 +21,7 @@ export const fallbackModelID = OllamaModelID.DEFAULTMODEL;
 
 export const OllamaModels: Record<OllamaModelID, OllamaModel> = {
   [OllamaModelID.DEFAULTMODEL]: {
-    name: 'llama2:latest',
+    name: 'mistral:latest',
     modified_at: new Date(),
     size: 4000,
   },
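For reference, the shape implied by this last hunk, as a hypothetical reconstruction: the enum's string value and the field types are inferred from the values in the diff, not copied from the repo.

```ts
// Hypothetical reconstruction of the types this hunk touches.
export interface OllamaModel {
  name: string;        // e.g. 'mistral:latest'
  modified_at: Date;
  size: number;
}

export enum OllamaModelID {
  DEFAULTMODEL = 'DEFAULT_MODEL', // assumed value, not from the repo
}

export const fallbackModelID = OllamaModelID.DEFAULTMODEL;

export const OllamaModels: Record<OllamaModelID, OllamaModel> = {
  [OllamaModelID.DEFAULTMODEL]: {
    name: 'mistral:latest',
    modified_at: new Date(),
    size: 4000,
  },
};
```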