Compare commits

...

8 Commits

Author  SHA1  Message  Date
Rafi  386659109c  Added a new message action: delete  2023-03-19 13:49:12 +08:00
Rafi  bd9e8bf45e  Optimize the editor and enhance the user experience.  2023-03-19 13:39:20 +08:00
Rafi  4e40530a8c  Added a new message action: edit  2023-03-19 13:13:27 +08:00
Rafi  ea69a350f4  add environment variable NUXT_DEV_SERVER  2023-03-19 12:53:44 +08:00
Rafi  18a4251714  feat: Message actions  2023-03-17 18:27:07 +08:00
Rafi  878fda0054  Support configuring model parameters in the front-end and storing them in localStorage.  2023-03-17 17:01:18 +08:00
Rafi  1f3a025918  feature: pwa  2023-03-17 12:36:24 +08:00
Rafi  f9db3e5866  update readme  2023-03-15 11:23:35 +08:00
16 changed files with 2497 additions and 70 deletions

View File

@@ -9,6 +9,14 @@
A ChatGPT web client that supports multiple users, multiple database connections for persistent data storage, and i18n. Provides Docker images and quick deployment scripts.
## 📢Updates
<details open>
<summary><strong>2023-03-15</strong></summary>
Add "open_registration" setting option in the admin panel to control whether user registration is enabled. You can log in to the admin panel and find this setting option under `Chat->Setting`. The default value of this setting is `True` (allow user registration). If you do not need it, please change it to `False`.
</details>
<details open>
<summary><strong>2023-03-10</strong></summary>

app.vue (new file, 9 lines)
View File

@@ -0,0 +1,9 @@
<template>
<div>
<VitePwaManifest />
<NuxtLoadingIndicator />
<NuxtLayout>
<NuxtPage />
</NuxtLayout>
</div>
</template>

View File

@@ -0,0 +1,98 @@
<script setup>
import copy from 'copy-to-clipboard'
const props = defineProps({
message: {
type: Object,
required: true
},
messageIndex: {
type: Number,
required: true
},
usePrompt: {
type: Function,
required: true
},
deleteMessage: {
type: Function,
required: true
}
})
const snackbar = ref(false)
const snackbarText = ref('')
const showSnackbar = (text) => {
snackbarText.value = text
snackbar.value = true
}
const copyMessage = () => {
copy(props.message.message)
showSnackbar('Copied!')
}
const editMessage = () => {
props.usePrompt(props.message.message)
}
const deleteMessage = async () => {
const { data, error } = await useAuthFetch(`/api/chat/messages/${props.message.id}/`, {
method: 'DELETE'
})
if (!error.value) {
// `this` is not available in <script setup>; call the handler passed down via props
props.deleteMessage(props.messageIndex)
showSnackbar('Deleted!')
} else {
showSnackbar('Delete failed')
}
}
</script>
<template>
<v-menu
>
<template v-slot:activator="{ props }">
<v-btn
v-bind="props"
icon
variant="text"
class="mx-1"
>
<v-icon icon="more_horiz"></v-icon>
</v-btn>
</template>
<v-list>
<v-list-item
@click="copyMessage()"
:title="$t('copy')"
prepend-icon="content_copy"
>
</v-list-item>
<v-list-item
@click="editMessage()"
:title="$t('edit')"
prepend-icon="edit"
>
</v-list-item>
<v-list-item
@click="deleteMessage()"
:title="$t('delete')"
prepend-icon="delete"
>
</v-list-item>
</v-list>
</v-menu>
<v-snackbar
v-model="snackbar"
location="top"
timeout="2000"
>
{{ snackbarText }}
</v-snackbar>
</template>
<style scoped>
</style>

View File

@@ -0,0 +1,191 @@
<script setup>
const dialog = ref(false)
const currentModel = useCurrentModel()
const availableModels = [
DEFAULT_MODEL.name
]
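// Persist any change to the selected model or its parameters (deep watch) to localStorage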
watch(currentModel, (newVal, oldVal) => {
saveCurrentModel(newVal)
}, { deep: true })
</script>
<template>
<v-dialog
v-model="dialog"
persistent
>
<template v-slot:activator="{ props }">
<v-list-item
v-bind="props"
rounded="xl"
prepend-icon="tune"
:title="$t('modelParameters')"
></v-list-item>
</template>
<v-card>
<v-toolbar
density="compact"
>
<v-toolbar-title>{{ $t('modelParameters') }}</v-toolbar-title>
<v-spacer></v-spacer>
<v-btn icon="close" @click="dialog = false"></v-btn>
</v-toolbar>
<v-card-text>
<v-select
v-model="currentModel.name"
:label="$t('model')"
:items="availableModels"
variant="underlined"
></v-select>
<v-row
no-gutters
>
<v-col cols="12">
<div class="d-flex justify-space-between align-center">
<v-list-subheader>{{ $t('temperature') }}</v-list-subheader>
<v-text-field
v-model="currentModel.temperature"
hide-details
single-line
density="compact"
type="number"
max="1"
step="0.01"
style="width: 100px"
class="flex-grow-0"
></v-text-field>
</div>
</v-col>
<v-col cols="12">
<v-slider
v-model="currentModel.temperature"
:max="1"
:step="0.01"
hide-details
>
</v-slider>
</v-col>
</v-row>
<v-row
no-gutters
>
<v-col cols="12">
<div class="d-flex justify-space-between align-center">
<v-list-subheader>{{ $t('maxTokens') }}</v-list-subheader>
<v-text-field
v-model="currentModel.max_tokens"
hide-details
single-line
density="compact"
type="number"
max="2048"
step="1"
style="width: 100px"
class="flex-grow-0"
></v-text-field>
</div>
</v-col>
<v-col cols="12">
<v-slider
v-model="currentModel.max_tokens"
:max="2048"
:step="1"
hide-details
>
</v-slider>
</v-col>
</v-row>
<v-row
no-gutters
>
<v-col cols="12">
<div class="d-flex justify-space-between align-center">
<v-list-subheader>{{ $t('topP') }}</v-list-subheader>
<v-text-field
v-model="currentModel.top_p"
hide-details
single-line
density="compact"
type="number"
max="1"
step="0.01"
style="width: 100px"
class="flex-grow-0"
></v-text-field>
</div>
</v-col>
<v-col cols="12">
<v-slider
v-model="currentModel.top_p"
:max="1"
:step="0.01"
hide-details
>
</v-slider>
</v-col>
</v-row>
<v-row no-gutters>
<v-col cols="12">
<div class="d-flex justify-space-between align-center">
<v-list-subheader>{{ $t('frequencyPenalty') }}</v-list-subheader>
<v-text-field
v-model="currentModel.frequency_penalty"
hide-details
single-line
density="compact"
type="number"
max="2"
step="0.01"
style="width: 100px"
class="flex-grow-0"
></v-text-field>
</div>
</v-col>
<v-col cols="12">
<v-slider
v-model="currentModel.frequency_penalty"
:max="2"
:step="0.01"
hide-details
></v-slider>
</v-col>
</v-row>
<v-row no-gutters>
<v-col cols="12">
<div class="d-flex justify-space-between align-center">
<v-list-subheader>{{ $t('presencePenalty') }}</v-list-subheader>
<v-text-field
v-model="currentModel.presence_penalty"
hide-details
single-line
density="compact"
type="number"
max="2"
step="0.01"
style="width: 100px"
class="flex-grow-0"
></v-text-field>
</div>
</v-col>
<v-col cols="12">
<v-slider
v-model="currentModel.presence_penalty"
:max="2"
:step="0.01"
hide-details
></v-slider>
</v-col>
</v-row>
</v-card-text>
</v-card>
</v-dialog>
</template>
<style scoped>
</style>

View File

@@ -1,17 +1,29 @@
<template>
<v-textarea
v-model="message"
:label="$t('writeAMessage')"
:placeholder="hint"
rows="1"
:auto-grow="autoGrow"
:disabled="disabled"
:loading="loading"
:hide-details="true"
append-inner-icon="send"
@keyup.enter.exact="enterOnly"
@click:appendInner="clickSendBtn"
></v-textarea>
<div
class="flex-grow-1 d-flex align-center justify-space-between"
>
<v-textarea
v-model="message"
:label="$t('writeAMessage')"
:placeholder="hint"
:rows="rows"
max-rows="8"
:auto-grow="autoGrow"
:disabled="disabled"
:loading="loading"
:hide-details="true"
clearable
variant="outlined"
@keydown.enter.exact="enterOnly"
></v-textarea>
<v-btn
:disabled="loading"
icon="send"
title="Send"
class="ml-3"
@click="clickSendBtn"
></v-btn>
</div>
</template>
<script>
@@ -39,7 +51,7 @@ export default {
message(val) {
const lines = val.split(/\r\n|\r|\n/).length;
if (lines > 8) {
this.rows = lines;
this.rows = 8;
this.autoGrow = false;
} else {
this.rows = 1;
@@ -65,7 +77,8 @@ export default {
clickSendBtn () {
this.send()
},
enterOnly () {
enterOnly (event) {
event.preventDefault();
if (!isMobile()) {
this.send()
}

View File

@@ -9,6 +9,13 @@
ChatGPT Web 客户端,支持多用户,支持 Mysql、PostgreSQL 等多种数据库连接进行数据持久化存储,支持多语言。提供 Docker 镜像和快速部署脚本。
## 📢 更新
<details open>
<summary><strong>2023-03-15</strong></summary>
在管理后台增加 `open_registration` 设置项,用于控制是否开放用户注册。你可以登录管理后台,在 `Chat->Setting` 中看到这个设置项,默认是 `True` (允许用户注册),如果不需要,请改成 `False`
</details>
<details open>
<summary><strong>2023-03-10</strong></summary>

View File

@@ -18,10 +18,21 @@
"feedback": "Feedback",
"newConversation": "New conversation",
"clearConversations": "Clear conversations",
"modelParameters": "Model Parameters",
"model": "Model",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": {
"me": "Me",
"ai": "AI"
},
"edit": "Edit",
"copy": "Copy",
"copied": "Copied",
"delete": "Delete",
"welcomeScreen": {
"introduction1": "is an unofficial client for ChatGPT, but uses the official OpenAI API.",
"introduction2": "You will need an OpenAI API Key before you can use this client.",

View File

@@ -18,10 +18,21 @@
"feedback": "反馈",
"newConversation": "新的对话",
"clearConversations": "清除对话",
"modelParameters": "模型参数",
"model": "模型",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": {
"me": "我",
"ai": "AI"
},
"edit": "编辑",
"copy": "复制",
"copied": "已复制",
"delete": "删除",
"welcomeScreen": {
"introduction1": "是一个非官方的ChatGPT客户端但使用OpenAI的官方API",
"introduction2": "在使用本客户端之前您需要一个OpenAI API密钥。",

View File

@@ -228,6 +228,8 @@ onNuxtReady(async () => {
</v-card>
</v-dialog>
<ModelParameters/>
<v-menu
>
<template v-slot:activator="{ props }">
@@ -302,6 +304,49 @@ onNuxtReady(async () => {
<v-main>
<NuxtPage/>
</v-main>
<div>
<div
v-if="$pwa?.offlineReady || $pwa?.needRefresh"
class="pwa-toast"
role="alert"
>
<div class="message">
<span v-if="$pwa.offlineReady">
App ready to work offline
</span>
<span v-else>
New content available, click on reload button to update.
</span>
</div>
<button
v-if="$pwa.needRefresh"
@click="$pwa.updateServiceWorker()"
>
Reload
</button>
<button @click="$pwa.cancelPrompt()">
Close
</button>
</div>
<div
v-if="$pwa?.showInstallPrompt && !$pwa?.offlineReady && !$pwa?.needRefresh"
class="pwa-toast"
role="alert"
>
<div class="message">
<span>
Install PWA
</span>
</div>
<button @click="$pwa.install()">
Install
</button>
<button @click="$pwa.cancelInstall()">
Cancel
</button>
</div>
</div>
</v-app>
</template>
@@ -316,4 +361,27 @@ onNuxtReady(async () => {
background-color: #999;
border-radius: 3px;
}
.pwa-toast {
position: fixed;
right: 0;
bottom: 0;
margin: 16px;
padding: 12px;
border: 1px solid #8885;
border-radius: 4px;
z-index: 1;
text-align: left;
box-shadow: 3px 4px 5px 0 #8885;
}
.pwa-toast .message {
margin-bottom: 8px;
}
.pwa-toast button {
border: 1px solid #8885;
outline: none;
margin-right: 5px;
border-radius: 2px;
padding: 3px 10px;
}
</style>

View File

@@ -25,9 +25,38 @@ export default defineNuxtConfig({
'highlight.js/styles/panda-syntax-dark.css',
],
modules: [
'@vite-pwa/nuxt',
'@nuxtjs/color-mode',
'@nuxtjs/i18n'
'@nuxtjs/i18n',
],
pwa: {
registerType: 'autoUpdate',
manifest: {
name: appName,
short_name: appName,
icons: [
{
src: 'icon-black.svg',
sizes: '900x900',
purpose: 'any maskable',
}
],
},
workbox: {
navigateFallback: '/',
globPatterns: ['**/*.{js,css,html,png,svg,ico}'],
},
client: {
installPrompt: true,
// you don't need to include this: only for testing purposes
// if enabling periodic sync for update use 1 hour or so (periodicSyncForUpdates: 3600)
periodicSyncForUpdates: 20,
},
devOptions: {
enabled: false,
type: 'module',
}
},
i18n: {
strategy: 'no_prefix',
locales: [
@@ -54,7 +83,7 @@ export default defineNuxtConfig({
nitro: {
devProxy: {
"/api": {
target: "http://localhost:8000/api",
target: process.env.NUXT_DEV_SERVER ?? 'http://localhost:8000/api',
prependPath: true,
changeOrigin: true,
}
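With this change the dev proxy target can be overridden per environment: export `NUXT_DEV_SERVER` before starting the Nuxt dev server (for example `NUXT_DEV_SERVER=http://127.0.0.1:9000/api`, a hypothetical host and port), and the proxy falls back to `http://localhost:8000/api` when the variable is unset.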

View File

@@ -15,6 +15,7 @@
},
"dependencies": {
"@microsoft/fetch-event-source": "^2.0.1",
"@vite-pwa/nuxt": "^0.0.7",
"copy-to-clipboard": "^3.3.3",
"highlight.js": "^11.7.0",
"is-mobile": "^3.1.1",

View File

@@ -6,6 +6,7 @@ definePageMeta({
})
import {EventStreamContentType, fetchEventSource} from '@microsoft/fetch-event-source'
import { nextTick } from 'vue'
import MessageActions from "~/components/MessageActions.vue";
const { $i18n, $auth } = useNuxtApp()
const runtimeConfig = useRuntimeConfig()
@@ -51,6 +52,14 @@ const abortFetch = () => {
}
const fetchReply = async (message, parentMessageId) => {
ctrl = new AbortController()
const data = Object.assign({}, currentModel.value, {
openaiApiKey: openaiApiKey.value,
message: message,
parentMessageId: parentMessageId,
conversationId: currentConversation.value.id
})
try {
await fetchEventSource('/api/conversation/', {
signal: ctrl.signal,
@@ -59,13 +68,7 @@ const fetchReply = async (message, parentMessageId) => {
'accept': 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: currentModel.value,
openaiApiKey: openaiApiKey.value,
message: message,
parentMessageId: parentMessageId,
conversationId: currentConversation.value.id
}),
body: JSON.stringify(data),
onopen(response) {
if (response.ok && response.headers.get('content-type') === EventStreamContentType) {
return;
@@ -153,6 +156,10 @@ const usePrompt = (prompt) => {
editor.value.usePrompt(prompt)
}
const deleteMessage = (index) => {
currentConversation.value.messages.splice(index, 1)
}
</script>
<template>
@@ -167,9 +174,16 @@ const usePrompt = (prompt) => {
cols="12"
>
<div
class="d-flex"
:class="message.is_bot ? 'justify-start mr-16' : 'justify-end ml-16'"
class="d-flex align-center"
:class="message.is_bot ? 'justify-start' : 'justify-end'"
>
<MessageActions
v-if="!message.is_bot"
:message="message"
:message-index="index"
:use-prompt="usePrompt"
:delete-message="deleteMessage"
/>
<v-card
:color="message.is_bot ? '' : 'primary'"
rounded="lg"
@@ -178,18 +192,14 @@ const usePrompt = (prompt) => {
<v-card-text>
<MsgContent :content="message.message" />
</v-card-text>
<!-- <v-card-actions-->
<!-- v-if="message.is_bot"-->
<!-- >-->
<!-- <v-spacer></v-spacer>-->
<!-- <v-tooltip text="Copy">-->
<!-- <template v-slot:activator="{ props }">-->
<!-- <v-btn v-bind="props" icon="content_copy"></v-btn>-->
<!-- </template>-->
<!-- </v-tooltip>-->
<!-- </v-card-actions>-->
</v-card>
<MessageActions
v-if="message.is_bot"
:message="message"
:message-index="index"
:use-prompt="usePrompt"
:delete-message="deleteMessage"
/>
</div>
</v-col>
</v-row>

public/icon-black.svg (new file, 3 lines)
View File

@@ -0,0 +1,3 @@
<svg width="900" height="900" viewBox="0 0 900 900" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M504.908 750H839.476C850.103 750.001 860.542 747.229 869.745 741.963C878.948 736.696 886.589 729.121 891.9 719.999C897.211 710.876 900.005 700.529 900 689.997C899.995 679.465 897.193 669.12 891.873 660.002L667.187 274.289C661.876 265.169 654.237 257.595 645.036 252.329C635.835 247.064 625.398 244.291 614.773 244.291C604.149 244.291 593.711 247.064 584.511 252.329C575.31 257.595 567.67 265.169 562.36 274.289L504.908 372.979L392.581 179.993C387.266 170.874 379.623 163.301 370.42 158.036C361.216 152.772 350.777 150 340.151 150C329.525 150 319.086 152.772 309.883 158.036C300.679 163.301 293.036 170.874 287.721 179.993L8.12649 660.002C2.80743 669.12 0.00462935 679.465 5.72978e-06 689.997C-0.00461789 700.529 2.78909 710.876 8.10015 719.999C13.4112 729.121 21.0523 736.696 30.255 741.963C39.4576 747.229 49.8973 750.001 60.524 750H270.538C353.748 750 415.112 713.775 457.336 643.101L559.849 467.145L614.757 372.979L779.547 655.834H559.849L504.908 750ZM267.114 655.737L120.551 655.704L340.249 278.586L449.87 467.145L376.474 593.175C348.433 639.03 316.577 655.737 267.114 655.737Z" fill="#0C0C0D"/>
</svg>


View File

@@ -1,6 +1,15 @@
export const STORAGE_KEY = {
OPENAI_MODELS: 'openai_models',
CURRENT_OPENAI_MODEL: 'current_openai_model',
MODELS: 'models',
CURRENT_MODEL: 'current_model',
OPENAI_API_KEY: 'openai_api_key',
}
export const DEFAULT_MODEL = {
name: 'gpt-3.5-turbo',
frequency_penalty: 0.0,
presence_penalty: 0.0,
max_tokens: 1000,
temperature: 0.7,
top_p: 1.0
}

View File

@@ -11,32 +11,28 @@ const set = (key, val) => {
localStorage.setItem(key, JSON.stringify(val))
}
const DEFAULT_OPENAI_MODEL = 'text-davinci-003'
export const setModels = (val) => {
const models = useModels()
set(STORAGE_KEY.OPENAI_MODELS, val)
set(STORAGE_KEY.MODELS, val)
models.value = val
}
export const getStoredModels = () => {
let models = get(STORAGE_KEY.OPENAI_MODELS)
let models = get(STORAGE_KEY.MODELS)
if (!models) {
models = [DEFAULT_OPENAI_MODEL]
models = [DEFAULT_MODEL]
}
return models
}
export const setCurrentModel = (val) => {
const model = useCurrentModel()
set(STORAGE_KEY.CURRENT_OPENAI_MODEL, val)
model.value = val
export const saveCurrentModel = (val) => {
set(STORAGE_KEY.CURRENT_MODEL, val)
}
export const getCurrentModel = () => {
let model = get(STORAGE_KEY.CURRENT_OPENAI_MODEL)
let model = get(STORAGE_KEY.CURRENT_MODEL)
if (!model) {
model = DEFAULT_OPENAI_MODEL
model = DEFAULT_MODEL
}
return model
}
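The `useCurrentModel` and `useModels` composables that these helpers feed are not shown in this diff; a minimal sketch, assuming they wrap Nuxt 3's `useState` and are hydrated on the client from the helpers above (file name and shape are hypothetical), could look like:

// composables/states.js (hypothetical sketch, not part of this diff)
// App-wide reactive state, seeded with DEFAULT_MODEL from the constants above.
export const useCurrentModel = () => useState('currentModel', () => DEFAULT_MODEL)
export const useModels = () => useState('models', () => [DEFAULT_MODEL])
// On the client the persisted values can then be loaded, e.g. inside onNuxtReady:
//   useCurrentModel().value = getCurrentModel()
//   useModels().value = getStoredModels()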

yarn.lock (2003 lines changed)

File diff suppressed because it is too large.