Mirror of https://github.com/kardolus/chatgpt-cli.git
Add error message for non-2XX status codes
@@ -38,7 +38,9 @@ func main() {
 		Long: "A powerful ChatGPT client that enables seamless interactions with the GPT model. " +
 			"Provides multiple modes and context management features, including the ability to " +
 			"pipe custom context into the conversation.",
-		RunE: run,
+		RunE:          run,
+		SilenceUsage:  true,
+		SilenceErrors: true,
 	}
 
 	rootCmd.PersistentFlags().BoolVarP(&interactiveMode, "interactive", "i", false, "Use interactive mode")
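As an aside, a minimal sketch of how these two Cobra fields typically pair with error handling in `main` (the `Use` string and the no-op handler below are placeholders, not code from this repo): `SilenceUsage` suppresses the usage text when `RunE` fails, and `SilenceErrors` stops Cobra from printing the error itself, leaving that to the caller of `Execute()`.

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{
		Use:           "chatgpt", // placeholder command name
		RunE:          func(cmd *cobra.Command, args []string) error { return nil },
		SilenceUsage:  true, // don't print usage text when RunE returns an error
		SilenceErrors: true, // don't let Cobra print the error; main reports it below
	}

	if err := rootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```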
@@ -45,10 +45,11 @@ Output:
 curl --location --insecure --request POST 'https://api.openai.com/v1/chat/completions' \
   --header "Authorization: Bearer ${OPENAI_API_KEY}" \
   --header 'Content-Type: application/json' \
-    --data-raw '{
+  --data-raw '{
      "model": "gpt-3.5-turbo",
-     "messages": [{"role": "user", "content": "What is the OpenAI mission?"}]
-    }' | jq .
+     "messages": [{"role": "user", "content": "What is the OpenAI mission?"}],
+     "stream": false
+  }' | jq .
 ```
 
 Output:
@@ -77,6 +78,31 @@ Output:
 }
 ```
 
+Or flip `stream` to `true` (this results in retrieving a ton of `jsonl`).
+
+```shell
+curl --location --insecure --request POST 'https://api.openai.com/v1/chat/completions' \
+  --header "Authorization: Bearer ${OPENAI_API_KEY}" \
+  --header 'Content-Type: application/json' \
+  --data-raw '{
+     "model": "gpt-3.5-turbo",
+     "messages": [{"role": "user", "content": "What is the OpenAI mission?"}],
+     "stream": true
+  }'
+```
+
+```shell
+... top omitted ...
+
+data: {"id":"chatcmpl-8B1ELWT5QKYmUbH0Az9anpvoOVdGZ","object":"chat.completion.chunk","created":1697637029,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":" power"},"finish_reason":null}]}
+
+data: {"id":"chatcmpl-8B1ELWT5QKYmUbH0Az9anpvoOVdGZ","object":"chat.completion.chunk","created":1697637029,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]}
+
+data: {"id":"chatcmpl-8B1ELWT5QKYmUbH0Az9anpvoOVdGZ","object":"chat.completion.chunk","created":1697637029,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}
+
+data: [DONE]
+```
+
 ### Providing custom context
 
 You can provide your own context in the messages array in your callout. You can split this data over multiple lines. For
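As a side note to the streaming example added above, here is a minimal Go sketch of how a client might consume that `data:`-prefixed stream. It is illustrative only and not code from this commit; the function name and sample input are made up, while the `[DONE]` sentinel and chunk format match the example output shown above.

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// readStream prints the JSON payload of each "data:" line until the
// "[DONE]" sentinel; each payload is a chat.completion.chunk object.
func readStream(body io.Reader) error {
	scanner := bufio.NewScanner(body)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if !strings.HasPrefix(line, "data: ") {
			continue // skip blank separator lines
		}
		payload := strings.TrimPrefix(line, "data: ")
		if payload == "[DONE]" {
			return nil
		}
		fmt.Println(payload)
	}
	return scanner.Err()
}

func main() {
	sample := "data: {\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\ndata: [DONE]\n"
	_ = readStream(strings.NewReader(sample))
}
```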
							
								
								
									
http/http.go (15 changed lines)
@@ -18,7 +18,8 @@ const (
 	errFailedToRead          = "failed to read response: %w"
 	errFailedToCreateRequest = "failed to create request: %w"
 	errFailedToMakeRequest   = "failed to make request: %w"
-	errHTTP                  = "http error: %d"
+	errHTTP                  = "http status %d: %s"
+	errHTTPStatus            = "http status: %d"
 	headerAuthorization      = "Authorization"
 	headerContentType        = "Content-Type"
 )
@@ -104,7 +105,17 @@ func (r *RestCaller) doRequest(method, url string, body []byte, stream bool) ([]
 	defer response.Body.Close()
 
 	if response.StatusCode < 200 || response.StatusCode >= 300 {
-		return nil, fmt.Errorf(errHTTP, response.StatusCode)
+		errorResponse, err := io.ReadAll(response.Body)
+		if err != nil {
+			return nil, fmt.Errorf(errHTTPStatus, response.StatusCode)
+		}
+
+		var errorData types.ErrorResponse
+		if err := json.Unmarshal(errorResponse, &errorData); err != nil {
+			return nil, fmt.Errorf(errHTTPStatus, response.StatusCode)
+		}
+
+		return errorResponse, fmt.Errorf(errHTTP, response.StatusCode, errorData.Error.Message)
 	}
 
 	if stream {
@@ -37,7 +37,7 @@ func testContract(t *testing.T, when spec.G, it spec.S) {
 	})
 
 	when("accessing the completion endpoint", func() {
-		it("should have the expected keys in the response", func() {
+		it("should return a successful response with expected keys", func() {
 			body := types.CompletionsRequest{
 				Messages: []types.Message{{
 					Role:    client.SystemRole,
@@ -64,6 +64,34 @@ func testContract(t *testing.T, when spec.G, it spec.S) {
 			Expect(data.Usage).ShouldNot(BeNil(), "Expected Usage to be present in the response")
 			Expect(data.Choices).ShouldNot(BeNil(), "Expected Choices to be present in the response")
 		})
+
+		it("should return an error response with appropriate error details", func() {
+			// Set the wrong API key
+			restCaller.SetAPIKey("wrong-key")
+
+			body := types.CompletionsRequest{
+				Messages: []types.Message{{
+					Role:    client.SystemRole,
+					Content: defaults.Role,
+				}},
+				Model:  defaults.Model,
+				Stream: false,
+			}
+
+			bytes, err := json.Marshal(body)
+			Expect(err).NotTo(HaveOccurred())
+
+			resp, err := restCaller.Post(defaults.URL+defaults.CompletionsPath, bytes, false)
+			Expect(err).To(HaveOccurred())
+
+			var errorData types.ErrorResponse
+			err = json.Unmarshal(resp, &errorData)
+			Expect(err).NotTo(HaveOccurred())
+
+			Expect(errorData.Error.Message).ShouldNot(BeEmpty(), "Expected error message to be present in the response")
+			Expect(errorData.Error.Type).ShouldNot(BeEmpty(), "Expected error type to be present in the response")
+			Expect(errorData.Error.Code).ShouldNot(BeEmpty(), "Expected error code to be present in the response")
+		})
 	})
 
 	when("accessing the models endpoint", func() {
@@ -41,3 +41,11 @@ type Data struct {
 		FinishReason string            `json:"finish_reason"`
 	} `json:"choices"`
 }
+
+type ErrorResponse struct {
+	Error struct {
+		Message string `json:"message"`
+		Type    string `json:"type"`
+		Code    string `json:"code"`
+	} `json:"error"`
+}
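To illustrate what this struct decodes, here is a minimal self-contained sketch. The error body below is a typical OpenAI-style payload with made-up values, not one captured from a real request, and the struct is copied from the diff above so the snippet compiles on its own.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ErrorResponse mirrors the type added in the diff above.
type ErrorResponse struct {
	Error struct {
		Message string `json:"message"`
		Type    string `json:"type"`
		Code    string `json:"code"`
	} `json:"error"`
}

func main() {
	// Illustrative error body; real ones come from the API on non-2XX statuses.
	body := []byte(`{"error":{"message":"Incorrect API key provided","type":"invalid_request_error","code":"invalid_api_key"}}`)

	var errorData ErrorResponse
	if err := json.Unmarshal(body, &errorData); err != nil {
		panic(err)
	}

	// Matches the new errHTTP format string: "http status %d: %s"
	fmt.Println(fmt.Errorf("http status %d: %s", 401, errorData.Error.Message))
	// Output: http status 401: Incorrect API key provided
}
```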