99 "os"
1010 "testing"
1111
12+ "github.com/sashabaranov/go-openai"
1213 "github.com/sashabaranov/go-openai/internal/test/checks"
1314 "github.com/sashabaranov/go-openai/jsonschema"
1415)
@@ -20,7 +21,7 @@ func TestAPI(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken)
+	c := openai.NewClient(apiToken)
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.NoError(t, err, "ListEngines error")
@@ -36,23 +37,23 @@ func TestAPI(t *testing.T) {
 		checks.NoError(t, err, "GetFile error")
 	} // else skip
 
-	embeddingReq := EmbeddingRequest{
+	embeddingReq := openai.EmbeddingRequest{
 		Input: []string{
 			"The food was delicious and the waiter",
 			"Other examples of embedding request",
 		},
-		Model: AdaSearchQuery,
+		Model: openai.AdaSearchQuery,
 	}
 	_, err = c.CreateEmbeddings(ctx, embeddingReq)
 	checks.NoError(t, err, "Embedding error")
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Content: "Hello!",
 				},
 			},
@@ -63,11 +64,11 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Name:    "John_Doe",
 					Content: "Hello!",
 				},
@@ -76,9 +77,9 @@ func TestAPI(t *testing.T) {
 	)
 	checks.NoError(t, err, "CreateChatCompletion (with name) returned error")
 
-	stream, err := c.CreateCompletionStream(ctx, CompletionRequest{
+	stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
 		Prompt:    "Ex falso quodlibet",
-		Model:     GPT3Ada,
+		Model:     openai.GPT3Ada,
 		MaxTokens: 5,
 		Stream:    true,
 	})
@@ -103,15 +104,15 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		context.Background(),
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Content: "What is the weather like in Boston?",
 				},
 			},
-			Functions: []FunctionDefinition{{
+			Functions: []openai.FunctionDefinition{{
 				Name: "get_current_weather",
 				Parameters: jsonschema.Definition{
 					Type: jsonschema.Object,
@@ -140,12 +141,12 @@ func TestAPIError(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken + "_invalid")
+	c := openai.NewClient(apiToken + "_invalid")
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.HasError(t, err, "ListEngines should fail with an invalid key")
 
-	var apiErr *APIError
+	var apiErr *openai.APIError
 	if !errors.As(err, &apiErr) {
 		t.Fatalf("Error is not an APIError: %+v", err)
 	}
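Taken together, the "+" side of these hunks shows the pattern the change introduces: the test imports the public github.com/sashabaranov/go-openai package and qualifies every identifier (NewClient, ChatCompletionRequest, GPT3Dot5Turbo, and so on) with openai. instead of using unqualified, package-internal names. A condensed sketch of that usage follows; the test name, the token environment variable, and the surrounding scaffolding are assumptions for illustration, not lines taken from this diff.

	// Sketch of an external (openai_test) package test using only the
	// identifiers that appear on the "+" lines above.
	package openai_test

	import (
		"context"
		"os"
		"testing"

		"github.com/sashabaranov/go-openai"
		"github.com/sashabaranov/go-openai/internal/test/checks"
	)

	func TestChatCompletionSketch(t *testing.T) {
		// Env var name is an assumption; the real test reads its token from the environment.
		apiToken := os.Getenv("OPENAI_TOKEN")
		if apiToken == "" {
			t.Skip("no API token provided")
		}

		c := openai.NewClient(apiToken)
		ctx := context.Background()

		_, err := c.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
			},
		})
		checks.NoError(t, err, "CreateChatCompletion error")
	}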