Antonio 2023-04-22 11:12:10 +08:00
parent c70c27c697
commit 7c6d1dc92a
2 changed files with 45 additions and 33 deletions

@@ -164,23 +164,7 @@ func nightmare(c *gin.Context) {
 			}
 			tmp_fulltext := original_response.Message.Content.Parts[0]
 			original_response.Message.Content.Parts[0] = strings.ReplaceAll(original_response.Message.Content.Parts[0], fulltext, "")
-			var delta responses.Delta = responses.Delta{
-				Content: original_response.Message.Content.Parts[0],
-				Role:    "assistant",
-			}
-			translated_response := responses.ChatCompletionChunk{
-				ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
-				Object:  "chat.completion.chunk",
-				Created: int64(original_response.Message.CreateTime),
-				Model:   "gpt-3.5-turbo-0301",
-				Choices: []responses.Choices{
-					{
-						Index:        0,
-						Delta:        delta,
-						FinishReason: nil,
-					},
-				},
-			}
+			translated_response := responses.NewChatCompletionChunk(original_response.Message.Content.Parts[0])
 			// Stream the response to the client
 			response_string, err := json.Marshal(translated_response)
@@ -199,22 +183,7 @@ func nightmare(c *gin.Context) {
 				fulltext = tmp_fulltext
 			} else {
 				if !original_request.Stream {
-					full_response := responses.ChatCompletion{
-						ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
-						Object:  "chat.completion",
-						Created: int64(0),
-						Model:   "gpt-3.5-turbo-0301",
-						Choices: []responses.Choice{
-							{
-								Message: responses.Msg{
-									Content: fulltext,
-									Role:    "assistant",
-								},
-								Index:        0,
-								FinishReason: "stop",
-							},
-						},
-					}
+					full_response := responses.NewChatCompletion(fulltext)
 					if err != nil {
 						return
 					}

@@ -53,6 +53,25 @@ type Delta struct {
 	Role    string `json:"role"`
 }
+
+func NewChatCompletionChunk(text string) ChatCompletionChunk {
+	return ChatCompletionChunk{
+		ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
+		Object:  "chat.completion.chunk",
+		Created: 0,
+		Model:   "gpt-3.5-turbo-0301",
+		Choices: []Choices{
+			{
+				Index: 0,
+				Delta: Delta{
+					Content: text,
+					Role:    "assistant",
+				},
+				FinishReason: nil,
+			},
+		},
+	}
+}
 
 type ChatCompletion struct {
 	ID      string `json:"id"`
 	Object  string `json:"object"`
@@ -75,3 +94,27 @@ type usage struct {
 	CompletionTokens int `json:"completion_tokens"`
 	TotalTokens      int `json:"total_tokens"`
 }
+
+func NewChatCompletion(full_text string) ChatCompletion {
+	return ChatCompletion{
+		ID:      "chatcmpl-QXlha2FBbmROaXhpZUFyZUF3ZXNvbWUK",
+		Object:  "chat.completion",
+		Created: int64(0),
+		Model:   "gpt-3.5-turbo-0301",
+		Usage: usage{
+			PromptTokens:     0,
+			CompletionTokens: 0,
+			TotalTokens:      0,
+		},
+		Choices: []Choice{
+			{
+				Message: Msg{
+					Content: full_text,
+					Role:    "assistant",
+				},
+				Index:        0,
+				FinishReason: "stop",
+			},
+		},
+	}
+}
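
A minimal usage sketch (not part of this commit), assuming the two constructors are exported from the responses package exactly as defined above; the module import path and the main wrapper are placeholders:

package main

import (
	"encoding/json"
	"fmt"

	"example.com/yourmodule/responses" // hypothetical import path; substitute the real module path
)

func main() {
	// Streaming branch: each content fragment becomes one chat.completion.chunk.
	chunk := responses.NewChatCompletionChunk("partial text")
	if b, err := json.Marshal(chunk); err == nil {
		fmt.Println(string(b))
	}

	// Non-streaming branch: the accumulated full text becomes one chat.completion.
	full := responses.NewChatCompletion("full response text")
	if b, err := json.Marshal(full); err == nil {
		fmt.Println(string(b))
	}
}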