go openai
该库为 OpenAI API 提供了非官方的 Go 客户端。我们支持:
安装
go get github.com/sashabaranov/go-openai
当前,Go-Openai需要GO版本1.18或更高版本。
用法
chatgpt示例用法:
// ChatGPT example usage: send a single user message and print the reply.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	client := openai.NewClient("your token")
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Hello!",
				},
			},
		},
	)
	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}

	fmt.Println(resp.Choices[0].Message.Content)
}
获取OpenAI API密钥:
- 访问 OpenAI 网站 https://platform.openai.com/account/api-keys。
- 如果您没有帐户，请单击“注册”创建一个；如果已有帐户，请单击“登录”。
- 登录后,请导航到您的API密钥管理页面。
- 单击“创建新的秘密密钥”。
- 输入新密钥的名称,然后单击“创建秘密键”。
- 将显示您的新API键。使用此键与OpenAI API进行交互。
注意:您的API密钥是敏感信息。不要与任何人分享。
其他示例:
ChatGPT 流式补全
// ChatGPT streaming completion: print each delta chunk as it arrives.
package main

import (
	"context"
	"errors"
	"fmt"
	"io"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	req := openai.ChatCompletionRequest{
		Model:     openai.GPT3Dot5Turbo,
		MaxTokens: 20,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    openai.ChatMessageRoleUser,
				Content: "Lorem ipsum",
			},
		},
		Stream: true,
	}
	stream, err := c.CreateChatCompletionStream(ctx, req)
	if err != nil {
		fmt.Printf("ChatCompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	fmt.Printf("Stream response: ")
	for {
		response, err := stream.Recv()
		// io.EOF signals that the server closed the stream normally.
		if errors.Is(err, io.EOF) {
			fmt.Println("\nStream finished")
			return
		}
		if err != nil {
			fmt.Printf("\nStream error: %v\n", err)
			return
		}
		// Use Print, not Printf: the delta is data, not a format string
		// (go vet flags non-constant format strings).
		fmt.Print(response.Choices[0].Delta.Content)
	}
}
GPT-3完成
// GPT-3 completion: a single non-streaming text completion.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	req := openai.CompletionRequest{
		Model:     openai.GPT3Babbage002,
		MaxTokens: 5,
		Prompt:    "Lorem ipsum",
	}
	resp, err := c.CreateCompletion(ctx, req)
	if err != nil {
		fmt.Printf("Completion error: %v\n", err)
		return
	}
	fmt.Println(resp.Choices[0].Text)
}
GPT-3 流式补全
// GPT-3 streaming completion: receive partial responses until io.EOF.
package main

import (
	"context"
	"errors"
	"fmt"
	"io"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	req := openai.CompletionRequest{
		Model:     openai.GPT3Babbage002,
		MaxTokens: 5,
		Prompt:    "Lorem ipsum",
		Stream:    true,
	}
	stream, err := c.CreateCompletionStream(ctx, req)
	if err != nil {
		fmt.Printf("CompletionStream error: %v\n", err)
		return
	}
	defer stream.Close()

	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			fmt.Println("Stream finished")
			return
		}
		if err != nil {
			fmt.Printf("Stream error: %v\n", err)
			return
		}
		fmt.Printf("Stream response: %v\n", response)
	}
}
音频语音到文本
// Audio speech-to-text: transcribe a local MP3 with Whisper.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	req := openai.AudioRequest{
		Model:    openai.Whisper1,
		FilePath: "recording.mp3",
	}
	resp, err := c.CreateTranscription(ctx, req)
	if err != nil {
		fmt.Printf("Transcription error: %v\n", err)
		return
	}
	fmt.Println(resp.Text)
}
音频字幕
// Audio captions: transcribe the file named in os.Args[1] to SRT subtitles
// and write them next to the input as "<input>.srt".
package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient(os.Getenv("OPENAI_KEY"))

	req := openai.AudioRequest{
		Model:    openai.Whisper1,
		FilePath: os.Args[1],
		Format:   openai.AudioResponseFormatSRT,
	}
	resp, err := c.CreateTranscription(context.Background(), req)
	if err != nil {
		fmt.Printf("Transcription error: %v\n", err)
		return
	}

	f, err := os.Create(os.Args[1] + ".srt")
	if err != nil {
		fmt.Printf("Could not open file: %v\n", err)
		return
	}
	defer f.Close()

	if _, err := f.WriteString(resp.Text); err != nil {
		fmt.Printf("Error writing to file: %v\n", err)
		return
	}
}
dall-e 2图像生成
// DALL-E 2 image generation: first request an image URL, then request the
// same kind of image as base64, decode it, and save it as a PNG file.
package main

import (
	"bytes"
	"context"
	"encoding/base64"
	"fmt"
	"image/png"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	// Sample image by link
	reqUrl := openai.ImageRequest{
		Prompt:         "Parrot on a skateboard performs a trick, cartoon style, natural light, high detail",
		Size:           openai.CreateImageSize256x256,
		ResponseFormat: openai.CreateImageResponseFormatURL,
		N:              1,
	}
	respUrl, err := c.CreateImage(ctx, reqUrl)
	if err != nil {
		fmt.Printf("Image creation error: %v\n", err)
		return
	}
	fmt.Println(respUrl.Data[0].URL)

	// Example image as base64
	reqBase64 := openai.ImageRequest{
		Prompt:         "Portrait of a humanoid parrot in a classic costume, high detail, realistic light, unreal engine",
		Size:           openai.CreateImageSize256x256,
		ResponseFormat: openai.CreateImageResponseFormatB64JSON,
		N:              1,
	}
	respBase64, err := c.CreateImage(ctx, reqBase64)
	if err != nil {
		fmt.Printf("Image creation error: %v\n", err)
		return
	}

	imgBytes, err := base64.StdEncoding.DecodeString(respBase64.Data[0].B64JSON)
	if err != nil {
		fmt.Printf("Base64 decode error: %v\n", err)
		return
	}

	r := bytes.NewReader(imgBytes)
	imgData, err := png.Decode(r)
	if err != nil {
		fmt.Printf("PNG decode error: %v\n", err)
		return
	}

	file, err := os.Create("example.png")
	if err != nil {
		fmt.Printf("File creation error: %v\n", err)
		return
	}
	defer file.Close()

	if err := png.Encode(file, imgData); err != nil {
		fmt.Printf("PNG encode error: %v\n", err)
		return
	}

	fmt.Println("The image was saved as example.png")
}
GPT图像1图像生成
// GPT Image 1 generation: request a base64-encoded JPEG and write it to disk.
package main

import (
	"context"
	"encoding/base64"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	c := openai.NewClient("your token")
	ctx := context.Background()

	req := openai.ImageRequest{
		Prompt:            "Parrot on a skateboard performing a trick. Large bold text \"SKATE MASTER\" banner at the bottom of the image. Cartoon style, natural light, high detail, 1:1 aspect ratio.",
		Background:        openai.CreateImageBackgroundOpaque,
		Model:             openai.CreateImageModelGptImage1,
		Size:              openai.CreateImageSize1024x1024,
		N:                 1,
		Quality:           openai.CreateImageQualityLow,
		OutputCompression: 100,
		OutputFormat:      openai.CreateImageOutputFormatJPEG,
		// Moderation: openai.CreateImageModerationLow,
		// User: "",
	}

	resp, err := c.CreateImage(ctx, req)
	if err != nil {
		// The original message was garbled ("Image creation Image generation
		// with GPT Image 1error"); this is the intended text.
		fmt.Printf("Image creation error: %v\n", err)
		return
	}

	fmt.Println("Image Base64:", resp.Data[0].B64JSON)

	// Decode the base64 data
	imgBytes, err := base64.StdEncoding.DecodeString(resp.Data[0].B64JSON)
	if err != nil {
		fmt.Printf("Base64 decode error: %v\n", err)
		return
	}

	// Write image to file
	outputPath := "generated_image.jpg"
	err = os.WriteFile(outputPath, imgBytes, 0644)
	if err != nil {
		fmt.Printf("Failed to write image file: %v\n", err)
		return
	}

	fmt.Printf("The image was saved as %s\n", outputPath)
}
配置代理
config := openai . DefaultConfig ( \"token\" ) proxyUrl , err := url . Parse ( \"http://localhost:{port}\" ) if err != nil { panic ( err ) } transport := & http. Transport { Proxy : http . ProxyURL ( proxyUrl ), } config . HTTPClient = & http. Client { Transport : transport , } c := openai . NewClientWithConfig ( config )
另请参阅:https://pkg.go.dev/github.com/sashabaranov/go-openai#clientconfig
CHATGPT支持上下文
// ChatGPT with conversation context: keep appending user and assistant
// messages to the same slice so each request carries the full history.
package main

import (
	"bufio"
	"context"
	"fmt"
	"os"
	"strings"

	"github.com/sashabaranov/go-openai"
)

func main() {
	client := openai.NewClient("your token")
	messages := make([]openai.ChatCompletionMessage, 0)
	reader := bufio.NewReader(os.Stdin)
	fmt.Println("Conversation")
	fmt.Println("---------------------")

	for {
		fmt.Print("-> ")
		text, _ := reader.ReadString('\n')
		// convert CRLF to LF
		text = strings.Replace(text, "\n", "", -1)
		messages = append(messages, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleUser,
			Content: text,
		})

		resp, err := client.CreateChatCompletion(
			context.Background(),
			openai.ChatCompletionRequest{
				Model:    openai.GPT3Dot5Turbo,
				Messages: messages,
			},
		)
		if err != nil {
			fmt.Printf("ChatCompletion error: %v\n", err)
			continue
		}

		content := resp.Choices[0].Message.Content
		messages = append(messages, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleAssistant,
			Content: content,
		})
		fmt.Println(content)
	}
}
Azure Openai Chatgpt
// Azure OpenAI ChatGPT: same API surface, configured with an Azure endpoint.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	// The endpoint placeholder was garbled in the source
	// ("https://*yo**ur ..."); this is the intended text.
	config := openai.DefaultAzureConfig("your Azure OpenAI Key", "https://your Azure OpenAI Endpoint")
	// If you use a deployment name different from the model name, you can customize the AzureModelMapperFunc function
	// config.AzureModelMapperFunc = func(model string) string {
	// 	azureModelMapping := map[string]string{
	// 		"gpt-3.5-turbo": "your gpt-3.5-turbo deployment name",
	// 	}
	// 	return azureModelMapping[model]
	// }

	client := openai.NewClientWithConfig(config)
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Hello Azure OpenAI!",
				},
			},
		},
	)
	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}

	fmt.Println(resp.Choices[0].Message.Content)
}
嵌入语义相似性
// Embedding semantic similarity: embed a query and a target text, then
// score their similarity with a dot product.
package main

import (
	"context"
	"log"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	client := openai.NewClient("your-token")

	// Create an EmbeddingRequest for the user query
	queryReq := openai.EmbeddingRequest{
		Input: []string{"How many chucks would a woodchuck chuck"},
		Model: openai.AdaEmbeddingV2,
	}

	// Create an embedding for the user query
	queryResponse, err := client.CreateEmbeddings(context.Background(), queryReq)
	if err != nil {
		log.Fatal("Error creating query embedding:", err)
	}

	// Create an EmbeddingRequest for the target text
	targetReq := openai.EmbeddingRequest{
		Input: []string{"How many chucks would a woodchuck chuck if the woodchuck could chuck wood"},
		Model: openai.AdaEmbeddingV2,
	}

	// Create an embedding for the target text
	targetResponse, err := client.CreateEmbeddings(context.Background(), targetReq)
	if err != nil {
		log.Fatal("Error creating target embedding:", err)
	}

	// Now that we have the embeddings for the user query and the target text, we
	// can calculate their similarity.
	queryEmbedding := queryResponse.Data[0]
	targetEmbedding := targetResponse.Data[0]

	similarity, err := queryEmbedding.DotProduct(&targetEmbedding)
	if err != nil {
		log.Fatal("Error calculating dot product:", err)
	}

	log.Printf("The similarity score between the query and the target is %f", similarity)
}
Azure Openai嵌入
// Azure OpenAI embeddings: vectorize a text and print the first and last
// ten dimensions of the resulting embedding.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	// The endpoint placeholder was garbled in the source
	// ("https://*yo**ur ..."); this is the intended text.
	config := openai.DefaultAzureConfig("your Azure OpenAI Key", "https://your Azure OpenAI Endpoint")
	config.APIVersion = "2023-05-15" // optional update to latest API version

	// If you use a deployment name different from the model name, you can customize the AzureModelMapperFunc function
	// config.AzureModelMapperFunc = func(model string) string {
	// 	azureModelMapping := map[string]string{
	// 		"gpt-3.5-turbo": "your gpt-3.5-turbo deployment name",
	// 	}
	// 	return azureModelMapping[model]
	// }

	input := "Text to vectorize"

	client := openai.NewClientWithConfig(config)
	resp, err := client.CreateEmbeddings(
		context.Background(),
		openai.EmbeddingRequest{
			Input: []string{input},
			Model: openai.AdaEmbeddingV2,
		})
	if err != nil {
		fmt.Printf("CreateEmbeddings error: %v\n", err)
		return
	}

	vectors := resp.Data[0].Embedding // []float32 with 1536 dimensions
	fmt.Println(vectors[:10], "...", vectors[len(vectors)-10:])
}
JSON策略用于调用功能
现在,聊天完成可以选择拨打函数以获取更多信息(请参阅此处的开发人员文档)。
为了描述可以调用的函数的类型,必须提供JSON模式。许多JSON模式库存在,并且比我们在此库中提供的更先进,但是我们为那些想使用此功能而无需格式化自己的JSON模式有效负载的人提供了一个简单的JSonschema软件包。
开发人员文档将此JSON模式定义为一个例子:
```json
{
  "name": "get_current_weather",
  "description": "Get the current weather in a given location",
  "parameters": {
    "type": "object",
    "properties": {
      "location": {
        "type": "string",
        "description": "The city and state, e.g. San Francisco, CA"
      },
      "unit": {
        "type": "string",
        "enum": ["celsius", "fahrenheit"]
      }
    },
    "required": ["location"]
  }
}
```
使用Jsonschema软件包,可以使用结构这样创建此架构:
FunctionDefinition { Name : \"get_current_weather\" , Parameters : jsonschema. Definition { Type : jsonschema . Object , Properties : map [ string ]jsonschema. Definition { \"location\" : { Type : jsonschema . String , Description : \"The city and state, e.g. San Francisco, CA\" , }, \"unit\" : { Type : jsonschema . String , Enum : [] string { \"celsius\" , \"fahrenheit\" }, }, }, Required : [] string { \"location\" }, }, }
函数定义的参数字段可以接受上述样式,甚至可以从另一个库中嵌套结构(只要可以将其编码为JSON)即可。
错误处理
OpenAI 官方维护了关于如何处理 API 错误的清晰文档。
例子:
// Extract the typed *openai.APIError with errors.As, then branch on the
// HTTP status code to decide whether a retry makes sense.
e := &openai.APIError{}
if errors.As(err, &e) {
switch e.HTTPStatusCode {
case 401:
// invalid auth or key (do not retry)
case 429:
// rate limiting or engine overload (wait and retry)
case 500:
// openai server error (retry)
default:
// unhandled
}
}
