5 Commits

Author   SHA1        Message                                             Date
cookeem  e2368fc284  generate pictures through the picture description   2023-03-22 23:09:42 +08:00
cookeem  ff2410ebea  v1.0.3                                              2023-03-22 23:05:24 +08:00
cookeem  3315e5940f  support generate image by prompt                    2023-03-22 23:04:05 +08:00
cookeem  38f7a73288  update README.md                                    2023-03-22 16:50:53 +08:00
cookeem  7151dac97d  support GPT3/GPt4                                   2023-03-22 16:36:47 +08:00
7 changed files with 134 additions and 17 deletions

View File

@@ -2,11 +2,11 @@ FROM alpine:3.15.3
LABEL maintainer="cookeem"
LABEL email="cookeem@qq.com"
LABEL version="v1.0.2"
LABEL version="v1.0.3"
RUN adduser -h /chatgpt-service -u 1000 -D dory
COPY chatgpt-service /chatgpt-service/
WORKDIR /chatgpt-service
USER dory
# docker build -t doryengine/chatgpt-service:v1.0.2-alpine .
# docker build -t doryengine/chatgpt-service:v1.0.3-alpine .

View File

@@ -1,4 +1,4 @@
# Real-time ChatGPT service, based on the latest gpt-3.5-turbo-0301 model
# Real-time ChatGPT service, support GPT3/GPT4
- [English README](README.md)
- [中文 README](README_CN.md)
@@ -52,6 +52,9 @@ chatgpt-stream /docker-entrypoint.sh ngin ... Up 0.0.0.0:3000->80/tcp,
# http://localhost:3000
```
- Enter a question directly and it will call the ChatGPT interface to return the answer
- Enter a picture description after `/image` and it will call the DALL-E2 interface to automatically generate a picture from that description
## How to build
```bash

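For orientation, the two usage bullets in the README hunk above describe the wire behaviour this change adds: plain text is answered by the chat completion stream, and a `/image ` prefix is routed to DALL-E 2. Below is a minimal client sketch, not part of this change: the `ws://localhost:3000/api/ws/chat` endpoint is inferred from the gin route groups in main.go and the nginx front end in docker-compose.yaml, and the assumption that the socket accepts the raw prompt text is not confirmed by this diff.

```go
package main

import (
	"fmt"
	"log"

	"github.com/gorilla/websocket"
)

func main() {
	// Assumption: the nginx container on :3000 proxies /api to chatgpt-service,
	// and the websocket route is /api/ws/chat (see the gin groups in main.go).
	conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:3000/api/ws/chat", nil)
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	// Plain text -> streamed chat answer; "/image <description>" -> DALL-E 2 picture.
	prompts := []string{
		"What is a goroutine?",
		"/image a watercolor painting of a lighthouse at dawn",
	}
	for _, p := range prompts {
		// Assumption: the server reads the prompt as a plain text frame.
		if err := conn.WriteMessage(websocket.TextMessage, []byte(p)); err != nil {
			log.Fatalf("write: %v", err)
		}
	}

	// Print whatever the server pushes back ("receive", "error" or "image" messages).
	for i := 0; i < 10; i++ {
		_, data, err := conn.ReadMessage()
		if err != nil {
			log.Fatalf("read: %v", err)
		}
		fmt.Println(string(data))
	}
}
```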
View File

@@ -1,4 +1,4 @@
# 实时ChatGPT服务基于最新的gpt-3.5-turbo-0301模型
# 实时ChatGPT服务支持GPT3/GPT4
- [English README](README.md)
- [中文 README](README_CN.md)
@@ -52,6 +52,9 @@ chatgpt-stream /docker-entrypoint.sh ngin ... Up 0.0.0.0:3000->80/tcp,
# http://localhost:3000
```
- 直接输入问题则调用ChatGPT接口返回答案
- `/image `后边输入想要的图片描述则调用DALL-E2接口通过图片描述自动生成图片
## 如何编译
```bash

View File

@@ -1,8 +1,10 @@
package chat
import (
    "fmt"
    "github.com/sashabaranov/go-openai"
    log "github.com/sirupsen/logrus"
    "math/rand"
    "os"
    "time"
)
@@ -39,6 +41,22 @@ func (logger Logger) LogPanic(args ...interface{}) {
    log.Panic(args...)
}
func RandomString(n int) string {
    var letter []rune
    lowerChars := "abcdefghijklmnopqrstuvwxyz"
    numberChars := "0123456789"
    chars := fmt.Sprintf("%s%s", lowerChars, numberChars)
    letter = []rune(chars)
    var str string
    b := make([]rune, n)
    seededRand := rand.New(rand.NewSource(time.Now().UnixNano()))
    for i := range b {
        b[i] = letter[seededRand.Intn(len(letter))]
    }
    str = string(b)
    return str
}
const (
    StatusFail string = "FAIL"

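The RandomString helper above seeds a fresh math/rand source on every call; that is adequate for the non-secret file names it generates for GetImageMessage further down, though not for anything security-sensitive. A compact standalone sketch of the same naming scheme, for illustration only (the assets/images/<date> layout mirrors the handler below):

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// Same approach as the RandomString helper added above: pick n runes
// from lowercase letters and digits using a time-seeded source.
func randomString(n int) string {
	letters := []rune("abcdefghijklmnopqrstuvwxyz0123456789")
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	b := make([]rune, n)
	for i := range b {
		b[i] = letters[r.Intn(len(letters))]
	}
	return string(b)
}

func main() {
	// Mirrors the file layout GetImageMessage uses further down in this diff:
	// assets/images/<date>/<16 random chars>.png
	date := time.Now().Format("2006-01-02")
	fmt.Printf("assets/images/%s/%s.png\n", date, randomString(16))
}
```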
View File

@@ -2,10 +2,12 @@ package chat
import (
    "context"
    "encoding/base64"
    "errors"
    "fmt"
    "io"
    "net/http"
    "os"
    "strings"
    "sync"
    "time"
@@ -100,7 +102,7 @@ func (api *Api) GetChatMessage(conn *websocket.Conn, cli *openai.Client, mutex *
    stream, err := cli.CreateChatCompletionStream(ctx, req)
    if err != nil {
        err = fmt.Errorf("[ERROR] create chatGPT stream model=%s error: %s", api.Config.Model, err.Error())
        err = fmt.Errorf("[ERROR] create ChatGPT stream model=%s error: %s", api.Config.Model, err.Error())
        chatMsg := Message{
            Kind: "error",
            Msg: err.Error(),
@@ -185,7 +187,7 @@ func (api *Api) GetChatMessage(conn *websocket.Conn, cli *openai.Client, mutex *
    stream, err := cli.CreateCompletionStream(ctx, req)
    if err != nil {
        err = fmt.Errorf("[ERROR] create chatGPT stream model=%s error: %s", api.Config.Model, err.Error())
        err = fmt.Errorf("[ERROR] create ChatGPT stream model=%s error: %s", api.Config.Model, err.Error())
        chatMsg := Message{
            Kind: "error",
            Msg: err.Error(),
@@ -262,6 +264,83 @@ func (api *Api) GetChatMessage(conn *websocket.Conn, cli *openai.Client, mutex *
    }
}
func (api *Api) GetImageMessage(conn *websocket.Conn, cli *openai.Client, mutex *sync.Mutex, requestMsg string) {
    var err error
    ctx := context.Background()
    prompt := strings.TrimPrefix(requestMsg, "/image ")
    req := openai.ImageRequest{
        Prompt: prompt,
        Size: openai.CreateImageSize256x256,
        ResponseFormat: openai.CreateImageResponseFormatB64JSON,
        N: 1,
    }
    sendError := func(err error) {
        err = fmt.Errorf("[ERROR] generate image error: %s", err.Error())
        chatMsg := Message{
            Kind: "error",
            Msg: err.Error(),
            MsgId: uuid.New().String(),
            CreateTime: time.Now().Format("2006-01-02 15:04:05"),
        }
        mutex.Lock()
        _ = conn.WriteJSON(chatMsg)
        mutex.Unlock()
        api.Logger.LogError(err.Error())
    }
    resp, err := cli.CreateImage(ctx, req)
    if err != nil {
        err = fmt.Errorf("[ERROR] generate image error: %s", err.Error())
        sendError(err)
        return
    }
    if len(resp.Data) == 0 {
        err = fmt.Errorf("[ERROR] generate image error: result is empty")
        sendError(err)
        return
    }
    imgBytes, err := base64.StdEncoding.DecodeString(resp.Data[0].B64JSON)
    if err != nil {
        err = fmt.Errorf("[ERROR] image base64 decode error: %s", err.Error())
        sendError(err)
        return
    }
    date := time.Now().Format("2006-01-02")
    imageDir := fmt.Sprintf("assets/images/%s", date)
    err = os.MkdirAll(imageDir, 0700)
    if err != nil {
        err = fmt.Errorf("[ERROR] create image directory error: %s", err.Error())
        sendError(err)
        return
    }
    imageFileName := fmt.Sprintf("%s.png", RandomString(16))
    err = os.WriteFile(fmt.Sprintf("%s/%s", imageDir, imageFileName), imgBytes, 0600)
    if err != nil {
        err = fmt.Errorf("[ERROR] write png image error: %s", err.Error())
        sendError(err)
        return
    }
    msg := fmt.Sprintf("api/%s/%s", imageDir, imageFileName)
    chatMsg := Message{
        Kind: "image",
        Msg: msg,
        MsgId: uuid.New().String(),
        CreateTime: time.Now().Format("2006-01-02 15:04:05"),
    }
    mutex.Lock()
    _ = conn.WriteJSON(chatMsg)
    mutex.Unlock()
    api.Logger.LogInfo(fmt.Sprintf("[IMAGE] # %s\n%s", requestMsg, msg))
    return
}
func (api *Api) WsChat(c *gin.Context) {
    startTime := time.Now()
    status := StatusFail
@@ -366,16 +445,29 @@ func (api *Api) WsChat(c *gin.Context) {
                mutex.Unlock()
                api.Logger.LogError(err.Error())
            } else {
                chatMsg := Message{
                    Kind: "receive",
                    Msg: requestMsg,
                    MsgId: uuid.New().String(),
                    CreateTime: time.Now().Format("2006-01-02 15:04:05"),
                if strings.HasPrefix(requestMsg, "/image ") {
                    chatMsg := Message{
                        Kind: "receive",
                        Msg: requestMsg,
                        MsgId: uuid.New().String(),
                        CreateTime: time.Now().Format("2006-01-02 15:04:05"),
                    }
                    mutex.Lock()
                    _ = conn.WriteJSON(chatMsg)
                    mutex.Unlock()
                    go api.GetImageMessage(conn, cli, mutex, requestMsg)
                } else {
                    chatMsg := Message{
                        Kind: "receive",
                        Msg: requestMsg,
                        MsgId: uuid.New().String(),
                        CreateTime: time.Now().Format("2006-01-02 15:04:05"),
                    }
                    mutex.Lock()
                    _ = conn.WriteJSON(chatMsg)
                    mutex.Unlock()
                    go api.GetChatMessage(conn, cli, mutex, requestMsg)
                }
                mutex.Lock()
                _ = conn.WriteJSON(chatMsg)
                mutex.Unlock()
                go api.GetChatMessage(conn, cli, mutex, requestMsg)
            }
        }
    case websocket.CloseMessage:

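Stripped of the websocket plumbing, the OpenAI call that GetImageMessage makes boils down to the sketch below. It reuses only calls that appear in the diff (ImageRequest, CreateImage, the B64JSON response format); openai.NewClient and the OPENAI_API_KEY variable are assumptions about how the client is constructed, which this diff does not show.

```go
package main

import (
	"context"
	"encoding/base64"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	// Assumption: the API key comes from an environment variable; the real
	// service wires the client up elsewhere (not shown in this diff).
	cli := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	// Same request shape as GetImageMessage: 256x256, one image, base64 payload.
	req := openai.ImageRequest{
		Prompt:         "a watercolor painting of a lighthouse at dawn",
		Size:           openai.CreateImageSize256x256,
		ResponseFormat: openai.CreateImageResponseFormatB64JSON,
		N:              1,
	}

	resp, err := cli.CreateImage(context.Background(), req)
	if err != nil {
		fmt.Println("generate image error:", err)
		return
	}
	if len(resp.Data) == 0 {
		fmt.Println("generate image error: result is empty")
		return
	}

	imgBytes, err := base64.StdEncoding.DecodeString(resp.Data[0].B64JSON)
	if err != nil {
		fmt.Println("base64 decode error:", err)
		return
	}
	if err := os.WriteFile("out.png", imgBytes, 0600); err != nil {
		fmt.Println("write png error:", err)
		return
	}
	fmt.Println("wrote out.png,", len(imgBytes), "bytes")
}
```

Requesting the B64JSON response format keeps the image bytes in the API response itself, so the service can write the file locally instead of fetching a temporary URL.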
View File

@@ -1,7 +1,7 @@
version: "3"
services:
  chatgpt-stream:
    image: "doryengine/chatgpt-stream:v1.0.2"
    image: "doryengine/chatgpt-stream:v1.0.3"
    hostname: chatgpt-stream
    container_name: chatgpt-stream
    ports:
@@ -11,7 +11,7 @@ services:
      - chatgpt-service
    restart: always
  chatgpt-service:
    image: "doryengine/chatgpt-service:v1.0.2-alpine"
    image: "doryengine/chatgpt-service:v1.0.3-alpine"
    hostname: chatgpt-service
    container_name: chatgpt-service
    ports:

View File

@@ -56,6 +56,7 @@ func main() {
    }
    groupApi := r.Group("/api")
    groupApi.Static("/assets", "assets")
    groupWs := groupApi.Group("/ws")
    groupWs.GET("chat", api.WsChat)
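
The one-line addition to main.go above is what makes the new image messages reachable: GetImageMessage writes the PNG under ./assets/images/<date>/ and reports the relative path api/assets/images/<date>/<name>.png, and mounting ./assets under the /api group turns that path into a URL the browser can fetch. A minimal sketch of just that routing; the listen address is an assumption for illustration:

```go
package main

import "github.com/gin-gonic/gin"

func main() {
	r := gin.Default()

	groupApi := r.Group("/api")
	// Serve the local ./assets directory at /api/assets, as the change above does,
	// so a Msg of "api/assets/images/<date>/<name>.png" resolves to
	// GET /api/assets/images/<date>/<name>.png on this server.
	groupApi.Static("/assets", "assets")

	// Assumption: the real service listens behind the nginx front end from
	// docker-compose.yaml; the port here is only for the sketch.
	_ = r.Run(":9000")
}
```

With the compose setup above, the same path would be reachable through the nginx container at http://localhost:3000/api/assets/..., assuming the front end proxies /api to chatgpt-service.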