Add a model flag

kardolus
2023-05-05 14:33:10 -04:00
parent ec26fea3f6
commit c09c7b8800
3 changed files with 21 additions and 7 deletions

View File

@@ -14,6 +14,7 @@ environment, demonstrating its practicality and effectiveness.
   limits.
 * **Custom context from local files**: Provide custom context through piping for GPT model reference during
   conversation.
+* **Custom chat models**: Use a custom chat model by specifying the model name with the `-m` or `--model` flag.
 * **Viper integration**: Robust configuration management.
 
 ## Installation
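
The new feature bullet above documents the `-m`/`--model` flag added by this commit. A minimal usage sketch, assuming the binary is installed as `chatgpt` (per the installation commands below) and that the prompt is passed as command-line arguments; the model names are illustrative only and not pinned down by this diff:

```shell
# Default model (the client's built-in GPTModel constant)
chatgpt "Explain the difference between a slice and an array in Go"

# Override the model for a single run, long or short form
chatgpt --model gpt-4 "Explain the difference between a slice and an array in Go"
chatgpt -m gpt-4 "Explain the difference between a slice and an array in Go"
```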
@@ -24,31 +25,31 @@ system and architecture:
 ### Apple M1 chips
 ```shell
-curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.2/chatgpt-darwin-arm64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
+curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.3/chatgpt-darwin-arm64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
 ```
 ### macOS Intel chips
 ```shell
-curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.2/chatgpt-darwin-amd64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
+curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.3/chatgpt-darwin-amd64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
 ```
 ### Linux (amd64)
 ```shell
-curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.2/chatgpt-linux-amd64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
+curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.3/chatgpt-linux-amd64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
 ```
 ### Linux (arm64)
 ```shell
-curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.2/chatgpt-linux-arm64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
+curl -L -o chatgpt https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.3/chatgpt-linux-arm64 && chmod +x chatgpt && sudo mv chatgpt /usr/local/bin/
 ```
 ### Windows (amd64)
 Download the binary
-from [this link](https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.2/chatgpt-windows-amd64.exe) and add it
+from [this link](https://github.com/kardolus/chatgpt-cli/releases/download/v1.0.3/chatgpt-windows-amd64.exe) and add it
 to your PATH.
 Choose the appropriate command for your system, which will download the binary, make it executable, and move it to your

View File

@@ -25,8 +25,9 @@ const (
 type Client struct {
 	History    []types.Message
 	caller     http.Caller
-	readWriter history.Store
 	capacity   int
+	model      string
+	readWriter history.Store
 }
 
 func New(caller http.Caller, rw history.Store) *Client {
@@ -34,6 +35,7 @@ func New(caller http.Caller, rw history.Store) *Client {
 		caller:     caller,
 		readWriter: rw,
 		capacity:   MaxTokenSize,
+		model:      GPTModel,
 	}
 }
@@ -42,6 +44,11 @@ func (c *Client) WithCapacity(capacity int) *Client {
 	return c
 }
 
+func (c *Client) WithModel(model string) *Client {
+	c.model = model
+	return c
+}
+
 // Query sends a query to the API and returns the response as a string.
 // It takes an input string as a parameter and returns a string containing
 // the API response or an error if there's any issue during the process.
@@ -119,8 +126,8 @@ func (c *Client) ProvideContext(context string) {
 func (c *Client) createBody(stream bool) ([]byte, error) {
 	body := types.Request{
-		Model:    GPTModel,
 		Messages: c.History,
+		Model:    c.model,
 		Stream:   stream,
 	}

View File

@@ -21,6 +21,7 @@ var (
 	showVersion  bool
 	GitCommit    string
 	GitVersion   string
+	modelName    string
 )
 
 func main() {
@@ -36,6 +37,7 @@ func main() {
 	rootCmd.PersistentFlags().BoolVarP(&queryMode, "query", "q", false, "Use query mode instead of stream mode")
 	rootCmd.PersistentFlags().BoolVarP(&clearHistory, "clear-history", "c", false, "Clear the history of ChatGPT CLI")
 	rootCmd.PersistentFlags().BoolVarP(&showVersion, "version", "v", false, "Display the version information")
+	rootCmd.PersistentFlags().StringVarP(&modelName, "model", "m", "", "Use a custom GPT model by specifying the model name")
 
 	viper.AutomaticEnv()
@@ -74,6 +76,10 @@ func run(cmd *cobra.Command, args []string) error {
 	}
 
 	client := client.New(http.New().WithSecret(secret), history.New())
+	if modelName != "" {
+		client = client.WithModel(modelName)
+	}
+
 	// Check if there is input from the pipe (stdin)
 	stat, _ := os.Stdin.Stat()
 	if (stat.Mode() & os.ModeCharDevice) == 0 {
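
Because `run()` only calls `WithModel` when the flag value is non-empty, omitting `-m` leaves the client on its `GPTModel` default. Combined with the stdin check above, piped context and an explicit model can be used together; a hedged sketch, where the file name and model string are assumptions for illustration:

```shell
# Pipe a local file in as context and pick the model for this run;
# dropping -m falls back to the client's default model.
cat main.go | chatgpt -m gpt-3.5-turbo "Summarize what this file does"
```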