first commit

This commit is contained in:
Rafi
2023-02-11 17:37:20 +08:00
commit 0c4f782a1b
22 changed files with 5949 additions and 0 deletions

4
.dockerignore Normal file
View File

@@ -0,0 +1,4 @@
node_modules
database.sqlite
.idea
.env

2
.env.example Normal file
View File

@@ -0,0 +1,2 @@
NUXT_OPENAI_API_KEY=YOUR_API_KEY
NUXT_OPENAI_MODEL_NAME=text-davinci-003

10
.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
node_modules
*.log*
.nuxt
.nitro
.cache
.output
.env
.idea
dist
database.sqlite

2
.npmrc Normal file
View File

@@ -0,0 +1,2 @@
shamefully-hoist=true
strict-peer-dependencies=false

15
Dockerfile Normal file
View File

@@ -0,0 +1,15 @@
# Build and run the Nuxt app in a single image.
# NOTE(review): the stage is named "builder" but no second stage follows, so
# this is effectively a single-stage build — the `as builder` alias is unused.
FROM node:18-alpine3.16 as builder
WORKDIR /app
# Copy manifests first so `yarn install` is cached across source-only rebuilds.
COPY package.json yarn.lock ./
RUN yarn install
COPY . .
RUN yarn build
# Nuxt's production server listens on 3000 by default.
EXPOSE 3000
ENTRYPOINT ["node", ".output/server/index.mjs"]

38
README.md Normal file
View File

@@ -0,0 +1,38 @@
# ChatGPT UI
A web client for ChatGPT, using OpenAI's API. The ChatGPT interface layer is implemented with [waylaidwanderer/node-chatgpt-api](https://github.com/waylaidwanderer/node-chatgpt-api).
This project is built on [Nuxt 3](https://nuxt.com/docs/getting-started/introduction).
## Quick start with docker
Clone the repository and run:
```bash
docker-compose up
```
## Development
### Setup
Make sure to install the dependencies:
```bash
# yarn
yarn install
```
### Development Server
Start the development server on http://localhost:3000
```bash
yarn dev
```
### Production
Build the application for production:
```bash
yarn build
```

192
app.vue Normal file
View File

@@ -0,0 +1,192 @@
<script setup>
import { fetchEventSource } from '@microsoft/fetch-event-source'
import ApiKeyEditor from "./components/ApiKeyEditor";

const runtimeConfig = useRuntimeConfig()

// True while a reply is streaming in; drives the stop button and editor state.
const fetchingResponse = ref(false)

// Controller for the in-flight SSE request. Declared at component scope so
// stop() can abort it — the original created it inside fetchReply, which left
// `ctrl` undefined (a ReferenceError) when stop() ran.
let ctrl = null

// Stream the assistant's reply for `message` over server-sent events,
// appending tokens to the trailing AI message of the current conversation.
const fetchReply = async (message, parentMessageId) => {
  ctrl = new AbortController()
  try {
    await fetchEventSource('/api/conversation', {
      signal: ctrl.signal,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        message: message,
        parentMessageId: parentMessageId,
        conversationId: currentConversation.value.id
      }),
      onopen(response) {
        if (response.status === 200) {
          return;
        }
        throw new Error(`Failed to send message. HTTP ${response.status} - ${response.statusText}`);
      },
      onclose() {
        // The server keeps the stream open until the 'done' frame; an early
        // close is treated as a failure.
        throw new Error(`Failed to send message. Server closed the connection unexpectedly.`);
      },
      onerror(err) {
        // Rethrow so fetch-event-source stops retrying and the catch below runs.
        throw err;
      },
      onmessage(message) {
        if (message.event === 'error') {
          throw new Error(JSON.parse(message.data).error);
        }
        const { type, data } = JSON.parse(message.data);
        if (type === 'done') {
          // The first reply of a new conversation carries the server-assigned id.
          if (currentConversation.value.id === null) {
            currentConversation.value.id = data.conversationId
          }
          currentConversation.value.messages[currentConversation.value.messages.length - 1].id = data.messageId
          ctrl.abort();
          fetchingResponse.value = false
          return;
        }
        // Token frame: append to the trailing AI message, or start a new one.
        if (currentConversation.value.messages[currentConversation.value.messages.length - 1].from === 'ai') {
          currentConversation.value.messages[currentConversation.value.messages.length - 1].message += data
        } else {
          currentConversation.value.messages.push({id: null, from: 'ai', message: data})
        }
        scrollChatWindow()
      },
    })
  } catch (err) {
    ctrl.abort()
    showSnackbar(err.message)
    fetchingResponse.value = false
  }
}

const theme = ref('light')
const toggleTheme = () => {
  theme.value = theme.value === 'light' ? 'dark' : 'light'
}

const defaultConversation = ref({
  id: null,
  messages: []
})
const currentConversation = ref({})

// Anchor element at the bottom of the chat; used to keep the view scrolled down.
const grab = ref(null)
const scrollChatWindow = () => {
  grab.value.scrollIntoView({behavior: 'smooth'})
}

const createNewConversation = () => {
  // Build a fresh conversation object with its own messages array. The
  // original used Object.assign(defaultConversation.value, {}), which returns
  // the shared default object itself — so old messages were never cleared and
  // every "new" conversation aliased the same state.
  currentConversation.value = {
    ...defaultConversation.value,
    messages: [...defaultConversation.value.messages]
  }
}

// Push the user's message and kick off the streaming reply.
const send = (message) => {
  fetchingResponse.value = true
  let parentMessageId = null
  if (currentConversation.value.messages.length > 0) {
    const lastMessage = currentConversation.value.messages[currentConversation.value.messages.length - 1]
    // Only chain onto a completed AI reply (one that has received its id).
    if (lastMessage.from === 'ai' && lastMessage.id !== null) {
      parentMessageId = lastMessage.id
    }
  }
  currentConversation.value.messages.push({from: 'me', parentMessageId: parentMessageId, message: message})
  fetchReply(message, parentMessageId)
  scrollChatWindow()
}

// Abort the in-flight reply stream, if any.
const stop = () => {
  ctrl?.abort();
  fetchingResponse.value = false
}

const snackbar = ref(false)
const snackbarText = ref('')
const showSnackbar = (text) => {
  snackbarText.value = text
  snackbar.value = true
}

onNuxtReady(() => {
  createNewConversation()
})
</script>
<template>
  <v-app
    :theme="theme"
  >
    <!-- Sidebar: model/API-key settings at the top, theme toggle pinned to the bottom. -->
    <v-navigation-drawer
      theme="dark"
      permanent
    >
      <v-list>
        <ModelNameEditor/>
        <ApiKeyEditor/>
      </v-list>
      <template v-slot:append>
        <v-divider></v-divider>
        <v-list>
          <!-- <v-list-item title="Clear conversations"></v-list-item>-->
          <v-list-item
            :prepend-icon="theme === 'light' ? 'dark_mode' : 'light_mode'"
            :title="(theme === 'light' ? 'Dark' : 'Light') + ' mode'"
            @click="toggleTheme"
          ></v-list-item>
        </v-list>
      </template>
    </v-navigation-drawer>

    <v-main>
      <!-- Message history: AI replies get the 'tonal' variant, user messages are plain. -->
      <div ref="chatWindow">
        <v-card
          rounded="0"
          elevation="0"
          v-for="(conversation, index) in currentConversation.messages"
          :key="index"
          :variant="conversation.from === 'ai' ? 'tonal' : ''"
        >
          <v-container>
            <v-card-text class="text-caption text-disabled">{{ conversation.from }}</v-card-text>
            <v-card-text>
              <MsgContent :content="conversation.message" />
            </v-card-text>
          </v-container>
          <v-divider></v-divider>
        </v-card>
        <!-- Invisible anchor scrollChatWindow() scrolls into view to follow new tokens. -->
        <div ref="grab" class="w-100" style="height: 150px;"></div>
      </div>
      <v-container>
      </v-container>
    </v-main>

    <v-footer app class="d-flex flex-column">
      <div class="px-16 w-100 d-flex align-center">
        <!-- Stop button is visible only while a reply is streaming. -->
        <v-btn
          v-show="fetchingResponse"
          icon="close"
          title="stop"
          class="mr-3"
          @click="stop"
        ></v-btn>
        <MsgEditor :send-message="send" :disabled="fetchingResponse" :loading="fetchingResponse" />
      </div>
      <div class="px-4 py-2 text-disabled text-caption font-weight-light text-center w-100">
        {{ new Date().getFullYear() }} {{ runtimeConfig.public.appName }}
      </div>
    </v-footer>

    <!-- Error toast fed by showSnackbar(). -->
    <v-snackbar
      v-model="snackbar"
      multi-line
    >
      {{ snackbarText }}
      <template v-slot:actions>
        <v-btn
          color="red"
          variant="text"
          @click="snackbar = false"
        >
          Close
        </v-btn>
      </template>
    </v-snackbar>
  </v-app>
</template>

View File

@@ -0,0 +1,48 @@
<template>
  <!-- Inline editor shown while the user is entering a new key. -->
  <v-list-item v-if="showApiKeyEditor">
    <v-text-field
      label="Api key"
      v-model="apiKeyInput"
      hide-details
      variant="outlined"
    ></v-text-field>
    <template v-slot:append>
      <v-icon icon="done" size="small" @click="submitApiKey"></v-icon>
      <v-icon icon="close" size="small" @click="showApiKeyEditor = false"></v-icon>
    </template>
  </v-list-item>
  <!-- Read-only display of the stored key.
       NOTE(review): this shows the raw API key in the sidebar; consider masking it. -->
  <v-list-item
    v-else
    :title="currentApiKey"
    subtitle="OpenAI API key"
  >
    <template v-slot:append>
      <v-icon icon="edit" @click="showApiKeyEditor = true"></v-icon>
    </template>
  </v-list-item>
</template>

<script setup>
// Load the stored key once during component setup.
const { data } = await useFetch('/api/settings/?key=apiKey')
const currentApiKey = ref(data.value.data??'Not set yet')
const apiKeyInput = ref(currentApiKey.value)
const showApiKeyEditor = ref(false)

// Persist the edited key via the settings endpoint and update the display.
// NOTE(review): useFetch inside an event handler works, but Nuxt recommends
// $fetch for on-demand requests — confirm before changing.
const submitApiKey = async () => {
  try {
    const { data } = await useFetch('/api/settings', {
      method: 'POST',
      body: { key: 'apiKey', value: apiKeyInput.value }
    })
    if (data.value.status === 'success') {
      currentApiKey.value = apiKeyInput.value
      showApiKeyEditor.value = false
    }
  } catch (e) {
    console.log(e)
  }
}
</script>

<style scoped>
</style>

View File

@@ -0,0 +1,48 @@
<template>
  <!-- Inline editor shown while the user is changing the model name. -->
  <v-list-item v-if="showModelNameEditor">
    <v-text-field
      label="Model name"
      v-model="modelNameInput"
      hide-details
      variant="outlined"
    ></v-text-field>
    <template v-slot:append>
      <v-icon icon="done" size="small" @click="submitModelName"></v-icon>
      <v-icon icon="close" size="small" @click="showModelNameEditor = false"></v-icon>
    </template>
  </v-list-item>
  <!-- Read-only display of the current model. -->
  <v-list-item
    v-else
    :title="currentModelName"
    subtitle="Current model"
  >
    <template v-slot:append>
      <v-icon icon="edit" @click="showModelNameEditor = true"></v-icon>
    </template>
  </v-list-item>
</template>

<script setup>
// Load the stored model name once during setup; the settings endpoint falls
// back to the runtime-config default when nothing has been stored yet.
const { data } = await useFetch('/api/settings/?key=modelName')
const currentModelName = ref(data.value.data)
const modelNameInput = ref(currentModelName.value)
const showModelNameEditor = ref(false)

// Persist the edited model name and update the display on success.
// NOTE(review): useFetch inside an event handler works, but Nuxt recommends
// $fetch for on-demand requests — confirm before changing.
const submitModelName = async () => {
  try {
    const { data } = await useFetch('/api/settings', {
      method: 'POST',
      body: { key: 'modelName', value: modelNameInput.value }
    })
    if (data.value.status === 'success') {
      currentModelName.value = modelNameInput.value
      showModelNameEditor.value = false
    }
  } catch (e) {
    console.log(e)
  }
}
</script>

<style scoped>
</style>

25
components/MsgContent.vue Normal file
View File

@@ -0,0 +1,25 @@
<script setup>
// Renders a chat message as Markdown with syntax-highlighted code blocks.
import { marked } from "marked"
import hljs from "highlight.js"

marked.setOptions({
  highlight: function (code, lang) {
    // Fall back to plaintext when the fenced language isn't known to hljs.
    const language = hljs.getLanguage(lang) ? lang : 'plaintext'
    return hljs.highlight(code, { language }).value
  },
  langPrefix: 'hljs language-', // highlight.js css class prefix
})

const props = defineProps(['content'])

// Markdown source converted to HTML; empty content renders as nothing.
const contentHtml = computed(() => {
  return props.content ? marked(props.content) : ''
})
</script>

<template>
  <!-- SECURITY NOTE(review): the marked output is injected unsanitized via
       v-html. Model responses are untrusted input, so this is a potential XSS
       vector; consider sanitizing (e.g. DOMPurify) before rendering. -->
  <div
    v-html="contentHtml"
  ></div>
</template>

56
components/MsgEditor.vue Normal file
View File

@@ -0,0 +1,56 @@
<template>
  <!-- NOTE(review): with append-inner-icon, Vuetify 3 emits click:appendInner;
       confirm the send-icon click actually fires via @click:append. -->
  <v-textarea
    v-model="message"
    clearable
    label="Message"
    placeholder="Type your message here"
    rows="1"
    :auto-grow="autoGrow"
    :disabled="disabled"
    :loading="loading"
    hide-details
    append-inner-icon="send"
    @keyup.enter="send"
    @click:append="send"
  ></v-textarea>
</template>

<script>
// Auto-growing message input. Invokes the `sendMessage` prop with the typed
// text on Enter or on the send icon, then clears the field.
export default {
  name: "MsgEditor",
  props: {
    // Callback invoked with the message text when the user sends.
    sendMessage: Function,
    // Disables input while a reply is streaming.
    disabled: Boolean,
    // Shows the loading bar while a reply is streaming.
    loading: Boolean,
  },
  data() {
    return {
      message: "",
      rows: 1,
      autoGrow: true,
    };
  },
  watch: {
    // Let the field auto-grow up to 8 lines; beyond that, pin the row count
    // so the textarea scrolls instead of growing without bound.
    message(val) {
      const lines = val.split(/\r\n|\r|\n/).length;
      if (lines > 8) {
        this.rows = lines;
        this.autoGrow = false;
      } else {
        this.rows = 1;
        this.autoGrow = true;
      }
    },
  },
  methods: {
    send() {
      const msg = this.message;
      // Guard against empty/whitespace-only submissions — the original
      // forwarded them straight to the API.
      if (typeof msg !== "string" || msg.trim() === "") {
        return;
      }
      this.message = "";
      this.sendMessage(msg);
    },
  },
};
</script>

<style scoped>
</style>

8
docker-compose.yml Normal file
View File

@@ -0,0 +1,8 @@
# Single-service stack: builds the Nuxt app image from the local Dockerfile.
version: '3'
services:
  app:
    build:
      context: .
      dockerfile: ./Dockerfile
    # Host port is configurable via APP_PORT (default 3000); the container
    # always listens on 3000 (see Dockerfile EXPOSE).
    ports:
      - '${APP_PORT:-3000}:3000'

26
nuxt.config.ts Normal file
View File

@@ -0,0 +1,26 @@
// https://nuxt.com/docs/api/configuration/nuxt-config
const appName = 'ChatGPT UI'

export default defineNuxtConfig({
  // NOTE: `dev` is deliberately NOT set here. Nuxt derives it from the CLI
  // command (`nuxt dev` vs `nuxt build`); the previous hard-coded `dev: false`
  // forced production behavior even during local development.
  app: {
    head: {
      title: appName,
    },
  },
  runtimeConfig: {
    // Server-only values, overridable via NUXT_OPENAI_API_KEY /
    // NUXT_OPENAI_MODEL_NAME (see .env.example).
    openaiApiKey: '',
    openaiModelName: 'text-davinci-003',
    // Values under `public` are exposed to the client bundle.
    public: {
      appName: appName
    }
  },
  build: {
    // Vuetify ships untranspiled ESM; transpile it for the server bundle.
    transpile: ['vuetify']
  },
  css: [
    'vuetify/styles',
    'material-design-icons-iconfont/dist/material-design-icons.css',
    'highlight.js/styles/panda-syntax-dark.css',
  ]
})

22
package.json Normal file
View File

@@ -0,0 +1,22 @@
{
  "private": true,
  "scripts": {
    "build": "nuxt build",
    "dev": "nuxt dev",
    "generate": "nuxt generate",
    "preview": "nuxt preview",
    "postinstall": "nuxt prepare"
  },
  "devDependencies": {
    "material-design-icons-iconfont": "^6.7.0",
    "nuxt": "^3.1.2"
  },
  "dependencies": {
    "@keyv/sqlite": "^3.6.4",
    "@microsoft/fetch-event-source": "^2.0.1",
    "@waylaidwanderer/chatgpt-api": "^1.12.2",
    "highlight.js": "^11.7.0",
    "keyv": "^4.5.2",
    "marked": "^4.2.12",
    "vuetify": "^3.0.6"
  }
}

21
plugins/vuetify.js Normal file
View File

@@ -0,0 +1,21 @@
import { createVuetify } from 'vuetify'
import { aliases, md } from 'vuetify/iconsets/md'
import * as components from 'vuetify/components'
// import * as directives from 'vuetify/directives'

// Registers Vuetify on the Nuxt app, configured for SSR and the Material
// Design icon font (the only icon set this project ships).
export default defineNuxtPlugin((nuxtApp) => {
  const iconConfig = {
    defaultSet: 'md',
    aliases,
    sets: { md },
  }
  const vuetify = createVuetify({
    ssr: true,
    icons: iconConfig,
    components,
    // directives
  })
  nuxtApp.vueApp.use(vuetify)
})

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.2 KiB

View File

@@ -0,0 +1,113 @@
import ChatGPTClient from '@waylaidwanderer/chatgpt-api'
import { PassThrough } from 'node:stream'
import {getSetting, setSetting} from "~/utils/keyv";
// Render one server-sent-event frame from `chunk`, emitting only the fields
// that are present (truthy): id, event, data, retry — in that order. Returns
// "" for an empty chunk; otherwise the frame is terminated by a blank line
// per the SSE wire format.
const serializeSSEEvent = (chunk) => {
  const lines = ["id", "event", "data", "retry"]
    .filter((field) => chunk[field])
    .map((field) => `${field}: ${chunk[field]}\n`)
  if (lines.length === 0) {
    return ""
  }
  return `${lines.join("")}\n`
}
// POST /api/conversation — streams a ChatGPT reply back as server-sent events:
// 'token' frames while generating, one 'done' frame with ids at the end, or an
// 'error' frame on failure.
export default defineEventHandler(async (event) => {
  // NOTE(review): runtimeConfig is not used below — candidate for removal.
  const runtimeConfig = useRuntimeConfig()
  const body = await readBody(event)
  // Normalize ids to strings; undefined lets the client library start a new
  // conversation / thread.
  const conversationId = body.conversationId ? body.conversationId.toString() : undefined
  const parentMessageId = body.parentMessageId ? body.parentMessageId.toString() : undefined

  // Every SSE frame is serialized and written through this stream.
  const tunnel = new PassThrough()
  const writeToTunnel = (data) => {
    tunnel.write(serializeSSEEvent(data))
  }
  setResponseHeaders(event, {
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive'
  })

  const modelName = await getSetting('modelName')
  const apiKey = await getSetting('apiKey')
  if (!apiKey) {
    // Headers are already set for an event stream, so report the missing key
    // as an SSE 'error' event instead of an HTTP error status.
    writeToTunnel({
      event: 'error',
      data: JSON.stringify({
        code: 503,
        error: 'You haven\'t set the api key of openai',
      }),
    })
    return sendStream(event, tunnel)
  }

  const clientOptions = {
    // (Optional) Parameters as described in https://platform.openai.com/docs/api-reference/completions
    modelOptions: {
      // The model is set to text-chat-davinci-002-20221122 by default, but you can override
      // it and any other parameters here
      // NOTE(review): modelName may be undefined when never stored; the client
      // then falls back to its own default — confirm this is intended.
      model: modelName,
    },
    // (Optional) Set custom instructions instead of "You are ChatGPT...".
    // promptPrefix: 'You are Bob, a cowboy in Western times...',
    // (Optional) Set a custom name for the user
    // userLabel: 'User',
    // (Optional) Set a custom name for ChatGPT
    // chatGptLabel: 'ChatGPT',
    // (Optional) Set to true to enable `console.debug()` logging
    debug: false,
  };
  const cacheOptions = {
    // Options for the Keyv cache, see https://www.npmjs.com/package/keyv
    // This is used for storing conversations, and supports additional drivers (conversations are stored in memory by default)
    // For example, to use a JSON file (`npm i keyv-file`) as a database:
    // store: new KeyvFile({ filename: 'cache.json' }),
    uri: 'sqlite://database.sqlite'
  };
  const chatGptClient = new ChatGPTClient(apiKey, clientOptions, cacheOptions);

  try {
    // Stream tokens as they arrive, then a final 'done' frame carrying the
    // conversation/message ids the client uses for follow-up messages.
    const response = await chatGptClient.sendMessage(body.message, {
      conversationId,
      parentMessageId,
      onProgress: (token) => {
        // console.log(token)
        writeToTunnel({ data: JSON.stringify({
          type: 'token',
          data: token
        })
        })
      }
    });
    writeToTunnel({ data: JSON.stringify({
      type: 'done',
      data: response
    }) })
    console.log(response)
  } catch (e) {
    // Map client-library errors onto the SSE 'error' event shape; default to
    // 503 / a generic message when the error carries no structured payload.
    const code = e?.json?.data?.code || 503;
    const message = e?.json?.error?.message || 'There was an error communicating with ChatGPT.';
    writeToTunnel({
      event: 'error',
      data: JSON.stringify({
        code,
        error: message,
      }),
    })
  }
  return sendStream(event, tunnel)
})

19
server/api/settings.js Normal file
View File

@@ -0,0 +1,19 @@
import {getSetting, setSetting} from "~/utils/keyv";
import {apiError, apiSuccess} from "~/utils/api";
// GET/POST /api/settings — read or persist a single key/value setting.
// GET ?key=<name> returns the stored value (modelName falls back to the
// runtime default); POST { key, value } stores a value.
export default defineEventHandler(async (event) => {
  const runtimeConfig = useRuntimeConfig()
  const method = getMethod(event)
  if (method === 'GET') {
    const query = getQuery(event)
    // Reject reads with no key rather than looking up `undefined`.
    if (!query.key) {
      return apiError('Missing required query parameter: key')
    }
    let value = await getSetting(query.key)
    if (!value && query.key === 'modelName') {
      // Unset model name falls back to the configured default.
      value = runtimeConfig.openaiModelName
    }
    return apiSuccess(value)
  }
  if (method === 'POST') {
    const body = await readBody(event)
    // The original stored values under `undefined` when `key` was missing.
    if (!body || !body.key) {
      return apiError('Missing required field: key')
    }
    await setSetting(body.key, body.value)
    return apiSuccess()
  }
  // Previously other methods fell through and returned undefined silently.
  return apiError(`Unsupported method: ${method}`)
})

4
tsconfig.json Normal file
View File

@@ -0,0 +1,4 @@
{
// https://nuxt.com/docs/guide/concepts/typescript
"extends": "./.nuxt/tsconfig.json"
}

16
utils/api.js Normal file
View File

@@ -0,0 +1,16 @@
// Standard success envelope for the JSON settings API; `data` is the payload
// (may be undefined for write acknowledgements).
export const apiSuccess = (data) => ({
  code: 200,
  status: 'success',
  data,
})
// Standard failure envelope; `message` describes what went wrong.
export const apiError = (message) => ({
  code: 400,
  status: 'error',
  error: message,
})

15
utils/keyv.js Normal file
View File

@@ -0,0 +1,15 @@
import Keyv from 'keyv'

// Settings live in their own namespace inside the same SQLite file the chat
// client uses for conversation storage.
const cache = new Keyv({
  namespace: 'settings',
  uri: 'sqlite://database.sqlite',
})

// Read a persisted setting; resolves to undefined when the key is unset.
export const getSetting = async (key) => cache.get(key)

// Persist `value` under `key`; resolves when the write completes.
export const setSetting = async (key, value) => cache.set(key, value)

5265
yarn.lock Normal file

File diff suppressed because it is too large Load Diff