Compare commits

...

10 Commits

19 changed files with 2532 additions and 86 deletions

View File

@@ -9,6 +9,23 @@
A ChatGPT web client that supports multiple users, multiple database connections for persistent data storage, supports i18n. Provides Docker images and quick deployment scripts. A ChatGPT web client that supports multiple users, multiple database connections for persistent data storage, supports i18n. Provides Docker images and quick deployment scripts.
## 📢Updates ## 📢Updates
<details open>
<summary><strong>2023-03-15</strong></summary>
Add "open_registration" setting option in the admin panel to control whether user registration is enabled. You can log in to the admin panel and find this setting option under `Chat->Setting`. The default value of this setting is `True` (allow user registration). If you do not need it, please change it to `False`.
</details>
<details open>
<summary><strong>2023-03-10</strong></summary>
Add 2 environment variables to control the typewriter effect:
- `NUXT_PUBLIC_TYPEWRITER=true` to enable/disable the typewriter effect
- `NUXT_PUBLIC_TYPEWRITER_DELAY=50` to set the delay time for each character in milliseconds.
</details>
<details open> <details open>
<summary><strong>2023-03-04</strong></summary> <summary><strong>2023-03-04</strong></summary>
@@ -19,7 +36,7 @@ A ChatGPT web client that supports multiple users, multiple database connections
</details> </details>
<details open> <details>
<summary><strong>2023-02-24</strong></summary> <summary><strong>2023-02-24</strong></summary>
Version 2 is a major update that separates the backend functionality as an independent project, hosted at [chatgpt-ui-server](https://github.com/WongSaang/chatgpt-ui-server). Version 2 is a major update that separates the backend functionality as an independent project, hosted at [chatgpt-ui-server](https://github.com/WongSaang/chatgpt-ui-server).
@@ -73,6 +90,9 @@ services:
image: wongsaang/chatgpt-ui-client:latest image: wongsaang/chatgpt-ui-client:latest
environment: environment:
- SERVER_DOMAIN=http://backend-web-server - SERVER_DOMAIN=http://backend-web-server
- NUXT_PUBLIC_APP_NAME='ChatGPT UI' # App name
- NUXT_PUBLIC_TYPEWRITER=true # Enable typewriter effect, default is false
- NUXT_PUBLIC_TYPEWRITER_DELAY=100 # Typewriter effect delay time, default is 50ms
depends_on: depends_on:
- backend-web-server - backend-web-server
ports: ports:

9
app.vue Normal file
View File

@@ -0,0 +1,9 @@
<template>
  <div>
    <!-- Injects the PWA web-app manifest link/meta tags (provided by @vite-pwa/nuxt). -->
    <VitePwaManifest />
    <!-- Thin progress bar shown while route navigation is in flight. -->
    <NuxtLoadingIndicator />
    <!-- Render the active layout with the matched page inside it. -->
    <NuxtLayout>
      <NuxtPage />
    </NuxtLayout>
  </div>
</template>

View File

@@ -0,0 +1,78 @@
<script setup>
import copy from 'copy-to-clipboard'

// Per-message action menu: copy the message text to the clipboard, and
// (currently disabled in the template) delete the message via the backend API.
const props = defineProps({
  // The message object; `message.message` holds the text, `message.id` the server id.
  message: {
    type: Object,
    required: true
  },
  // Index of this message in the parent's message list, echoed back on delete
  // so the parent can splice it out locally.
  messageIndex: {
    type: Number,
    required: true
  }
})

// Bug fix: the original called `this.$emit(...)`, but `<script setup>` has no
// `this` — emits must be declared with defineEmits and called via the returned fn.
const emit = defineEmits(['deleteMessage'])

const snackbar = ref(false)
const snackbarText = ref('')

// Show a transient snackbar at the top of the screen with the given text.
const showSnackbar = (text) => {
  snackbarText.value = text
  snackbar.value = true
}

// Copy the raw message text to the clipboard and confirm with a snackbar.
const copyMessage = () => {
  copy(props.message.message)
  showSnackbar('Copied!')
}

// Delete the message on the server, then tell the parent to remove it locally.
const deleteMessage = async () => {
  const { data, error } = await useAuthFetch(`/api/chat/messages/${props.message.id}/`, {
    method: 'DELETE'
  })
  if (!error.value) {
    emit('deleteMessage', props.messageIndex)
    showSnackbar('Deleted!')
  } else {
    // Bug fix: this previously ran unconditionally, clobbering the
    // "Deleted!" snackbar even when the request succeeded.
    showSnackbar('Delete failed')
  }
}
</script>
<template>
  <v-menu
  >
    <!-- Note: the slot's `props` (activator bindings) shadows the component props here. -->
    <template v-slot:activator="{ props }">
      <v-btn
        v-bind="props"
        icon
        variant="text"
        class="mx-1"
      >
        <v-icon icon="more_horiz"></v-icon>
      </v-btn>
    </template>
    <v-list>
      <v-list-item
        @click="copyMessage()"
      >
        <v-list-item-title>{{ $t('copy') }}</v-list-item-title>
      </v-list-item>
      <!-- Delete action intentionally disabled for now; deleteMessage() above is kept ready. -->
      <!-- <v-list-item-->
      <!--     @click="deleteMessage()"-->
      <!-- >-->
      <!--   <v-list-item-title>{{ $t('delete') }}</v-list-item-title>-->
      <!-- </v-list-item>-->
    </v-list>
  </v-menu>
  <v-snackbar
    v-model="snackbar"
    location="top"
    timeout="2000"
  >
    {{ snackbarText }}
  </v-snackbar>
</template>
<style scoped>
</style>

View File

@@ -0,0 +1,191 @@
<script setup>
// Dialog for editing the active model's sampling parameters
// (model name, temperature, max_tokens, top_p, frequency/presence penalty).
const dialog = ref(false)
// Reactive current-model object (Nuxt auto-imported composable); each field
// below is bound with v-model directly onto its properties.
const currentModel = useCurrentModel()
// Model names offered by the selector; only the default model for now.
const availableModels = [
  DEFAULT_MODEL.name
]
// Persist every edit immediately; `deep` so nested property mutations
// (e.g. currentModel.temperature) also trigger the save.
watch(currentModel, (newVal, oldVal) => {
  saveCurrentModel(newVal)
}, { deep: true })
</script>
<template>
  <v-dialog
    v-model="dialog"
    persistent
  >
    <!-- Activator rendered as a list item (lives in the app's nav drawer). -->
    <template v-slot:activator="{ props }">
      <v-list-item
        v-bind="props"
        rounded="xl"
        prepend-icon="tune"
        :title="$t('modelParameters')"
      ></v-list-item>
    </template>
    <v-card>
      <v-toolbar
        density="compact"
      >
        <v-toolbar-title>{{ $t('modelParameters') }}</v-toolbar-title>
        <v-spacer></v-spacer>
        <v-btn icon="close" @click="dialog = false"></v-btn>
      </v-toolbar>
      <v-card-text>
        <v-select
          v-model="currentModel.name"
          :label="$t('model')"
          :items="availableModels"
          variant="underlined"
        ></v-select>
        <!-- Each parameter row pairs a numeric text field with a slider bound
             to the SAME model property, so either control updates the other.
             NOTE(review): no `min` is set anywhere; OpenAI allows temperature
             up to 2 and penalties from -2 — confirm the intended ranges. -->
        <v-row
          no-gutters
        >
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('temperature') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.temperature"
                hide-details
                single-line
                density="compact"
                type="number"
                max="1"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.temperature"
              :max="1"
              :step="0.01"
              hide-details
            >
            </v-slider>
          </v-col>
        </v-row>
        <v-row
          no-gutters
        >
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('maxTokens') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.max_tokens"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2048"
                step="1"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.max_tokens"
              :max="2048"
              :step="1"
              hide-details
            >
            </v-slider>
          </v-col>
        </v-row>
        <v-row
          no-gutters
        >
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('topP') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.top_p"
                hide-details
                single-line
                density="compact"
                type="number"
                max="1"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.top_p"
              :max="1"
              :step="0.01"
              hide-details
            >
            </v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('frequencyPenalty') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.frequency_penalty"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.frequency_penalty"
              :max="2"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('presencePenalty') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.presence_penalty"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.presence_penalty"
              :max="2"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
      </v-card-text>
    </v-card>
  </v-dialog>
</template>
<style scoped>
</style>

View File

@@ -63,7 +63,7 @@ onUpdated(() => {
<style> <style>
.chat-msg-content ol { .chat-msg-content ol {
list-style-position: inside; padding-left: 2em;
} }
.hljs-code-container { .hljs-code-container {
border-radius: 3px; border-radius: 3px;

View File

@@ -1,6 +1,28 @@
#!/bin/bash #!/bin/bash
read -p "Please enter a resolved domain name: " domain read -p "Please enter a domain name or external IP address [default: localhost]: " APP_DOMAIN
if [ -z "$APP_DOMAIN" ]; then
APP_DOMAIN="localhost"
fi
read -p "Please set a port for the frontend server [default: 80]: " CLIENT_PORT
if [ -z "$CLIENT_PORT" ]; then
CLIENT_PORT="80"
fi
read -p "Please set a port for the backend server [default: 9000]: " SERVER_PORT
if [ -z "$SERVER_PORT" ]; then
SERVER_PORT="9000"
fi
read -p "Please set a port for the backend WSGI server [default: 8000]: " WSGI_PORT
if [ -z "$WSGI_PORT" ]; then
WSGI_PORT="8000"
fi
if [[ $(which docker) ]]; then if [[ $(which docker) ]]; then
echo "Docker is already installed" echo "Docker is already installed"
@@ -43,6 +65,6 @@ sudo curl -L "https://raw.githubusercontent.com/WongSaang/chatgpt-ui/main/docker
echo "Starting services..." echo "Starting services..."
sudo APP_DOMAIN="${domain}:9000" docker-compose up -d sudo APP_DOMAIN="${APP_DOMAIN}:${SERVER_PORT}" CLIENT_PORT=${CLIENT_PORT} SERVER_PORT=${SERVER_PORT} WSGI_PORT=${WSGI_PORT} docker-compose up --pull -d
echo "Done" echo "Done"

View File

@@ -4,12 +4,16 @@ services:
image: wongsaang/chatgpt-ui-client:latest image: wongsaang/chatgpt-ui-client:latest
environment: environment:
- SERVER_DOMAIN=http://backend-web-server - SERVER_DOMAIN=http://backend-web-server
- NUXT_PUBLIC_APP_NAME='ChatGPT UI'
- NUXT_PUBLIC_TYPEWRITER=true
- NUXT_PUBLIC_TYPEWRITER_DELAY=100
depends_on: depends_on:
- backend-web-server - backend-web-server
ports: ports:
- '80:80' - '${CLIENT_PORT:-80}:80'
networks: networks:
- chatgpt_ui_network - chatgpt_ui_network
restart: always
backend-wsgi-server: backend-wsgi-server:
image: wongsaang/chatgpt-ui-wsgi-server:latest image: wongsaang/chatgpt-ui-wsgi-server:latest
environment: environment:
@@ -25,19 +29,21 @@ services:
# - EMAIL_HOST_PASSWORD= # - EMAIL_HOST_PASSWORD=
# - EMAIL_USE_TLS=True # - EMAIL_USE_TLS=True
ports: ports:
- '8000:8000' - '${WSGI_PORT:-8000}:8000'
networks: networks:
- chatgpt_ui_network - chatgpt_ui_network
restart: always
backend-web-server: backend-web-server:
image: wongsaang/chatgpt-ui-web-server:latest image: wongsaang/chatgpt-ui-web-server:latest
environment: environment:
- BACKEND_URL=http://backend-wsgi-server:8000 - BACKEND_URL=http://backend-wsgi-server:8000
ports: ports:
- '9000:80' - '${SERVER_PORT:-9000}:80'
depends_on: depends_on:
- backend-wsgi-server - backend-wsgi-server
networks: networks:
- chatgpt_ui_network - chatgpt_ui_network
restart: always
networks: networks:
chatgpt_ui_network: chatgpt_ui_network:

View File

@@ -9,6 +9,22 @@
ChatGPT Web 客户端,支持多用户,支持 Mysql、PostgreSQL 等多种数据库连接进行数据持久化存储,支持多语言。提供 Docker 镜像和快速部署脚本。 ChatGPT Web 客户端,支持多用户,支持 Mysql、PostgreSQL 等多种数据库连接进行数据持久化存储,支持多语言。提供 Docker 镜像和快速部署脚本。
## 📢 更新 ## 📢 更新
<details open>
<summary><strong>2023-03-15</strong></summary>
在管理后台增加 `open_registration` 设置项,用于控制是否开放用户注册。你可以登录管理后台,在 `Chat->Setting` 中看到这个设置项,默认是 `True` (允许用户注册),如果不需要,请改成 `False`
</details>
<details open>
<summary><strong>2023-03-10</strong></summary>
增加 2 个环境变量来控制打字机效果, 详见下方 docker-compose 配置的环境变量说明
- `NUXT_PUBLIC_TYPEWRITER` 是否开启打字机效果
- `NUXT_PUBLIC_TYPEWRITER_DELAY` 每个字的延迟时间,单位:毫秒
</details>
<details open> <details open>
<summary><strong>2023-03-04</strong></summary> <summary><strong>2023-03-04</strong></summary>
@@ -19,7 +35,7 @@ ChatGPT Web 客户端,支持多用户,支持 Mysql、PostgreSQL 等多种数
</details> </details>
<details open> <details>
<summary><strong>2023-02-24</strong></summary> <summary><strong>2023-02-24</strong></summary>
V2 是一个重要的更新,将后端功能分离为一个独立的项目,托管在 [chatgpt-ui-server](https://github.com/WongSaang/chatgpt-ui-server), 该项目使用基于 Python 的 Django 框架。 V2 是一个重要的更新,将后端功能分离为一个独立的项目,托管在 [chatgpt-ui-server](https://github.com/WongSaang/chatgpt-ui-server), 该项目使用基于 Python 的 Django 框架。
@@ -72,6 +88,9 @@ services:
image: wongsaang/chatgpt-ui-client:latest image: wongsaang/chatgpt-ui-client:latest
environment: environment:
- SERVER_DOMAIN=http://backend-web-server - SERVER_DOMAIN=http://backend-web-server
- NUXT_PUBLIC_APP_NAME='ChatGPT UI' # App 名称,默认为 ChatGPT UI
- NUXT_PUBLIC_TYPEWRITER=true # 是否启用打字机效果,默认关闭
- NUXT_PUBLIC_TYPEWRITER_DELAY=100 # 打字机效果的延迟时间,默认 50毫秒
depends_on: depends_on:
- backend-web-server - backend-web-server
ports: ports:

View File

@@ -18,10 +18,20 @@
"feedback": "Feedback", "feedback": "Feedback",
"newConversation": "New conversation", "newConversation": "New conversation",
"clearConversations": "Clear conversations", "clearConversations": "Clear conversations",
"modelParameters": "Model Parameters",
"model": "Model",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": { "roles": {
"me": "Me", "me": "Me",
"ai": "AI" "ai": "AI"
}, },
"copy": "Copy",
"copied": "Copied",
"delete": "Delete",
"welcomeScreen": { "welcomeScreen": {
"introduction1": "is an unofficial client for ChatGPT, but uses the official OpenAI API.", "introduction1": "is an unofficial client for ChatGPT, but uses the official OpenAI API.",
"introduction2": "You will need an OpenAI API Key before you can use this client.", "introduction2": "You will need an OpenAI API Key before you can use this client.",

View File

@@ -18,10 +18,20 @@
"feedback": "反馈", "feedback": "反馈",
"newConversation": "新的对话", "newConversation": "新的对话",
"clearConversations": "清除对话", "clearConversations": "清除对话",
"modelParameters": "模型参数",
"model": "模型",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": { "roles": {
"me": "我", "me": "我",
"ai": "AI" "ai": "AI"
}, },
"copy": "复制",
"copied": "已复制",
"delete": "删除",
"welcomeScreen": { "welcomeScreen": {
"introduction1": "是一个非官方的ChatGPT客户端但使用OpenAI的官方API", "introduction1": "是一个非官方的ChatGPT客户端但使用OpenAI的官方API",
"introduction2": "在使用本客户端之前您需要一个OpenAI API密钥。", "introduction2": "在使用本客户端之前您需要一个OpenAI API密钥。",

View File

@@ -1,6 +1,4 @@
<script setup> <script setup>
import {useConversions} from "../composables/states";
import {getConversions} from "../utils/helper";
import {useDisplay} from "vuetify"; import {useDisplay} from "vuetify";
const { $i18n } = useNuxtApp() const { $i18n } = useNuxtApp()
@@ -25,6 +23,7 @@ const setLang = (lang) => {
} }
const conversations = useConversions() const conversations = useConversions()
const currentConversation = useConversion()
const editingConversation = ref(null) const editingConversation = ref(null)
const deletingConversationIndex = ref(null) const deletingConversationIndex = ref(null)
@@ -54,6 +53,9 @@ const deleteConversation = async (index) => {
}) })
deletingConversationIndex.value = null deletingConversationIndex.value = null
if (!error.value) { if (!error.value) {
if (conversations.value[index].id === currentConversation.value.id) {
createNewConversion()
}
conversations.value.splice(index, 1) conversations.value.splice(index, 1)
} }
} }
@@ -162,7 +164,7 @@ onNuxtReady(async () => {
icon="edit" icon="edit"
size="small" size="small"
variant="text" variant="text"
@click="editConversation(cIdx)" @click.stop="editConversation(cIdx)"
> >
</v-btn> </v-btn>
<v-btn <v-btn
@@ -170,7 +172,7 @@ onNuxtReady(async () => {
size="small" size="small"
variant="text" variant="text"
:loading="deletingConversationIndex === cIdx" :loading="deletingConversationIndex === cIdx"
@click="deleteConversation(cIdx)" @click.stop="deleteConversation(cIdx)"
> >
</v-btn> </v-btn>
</div> </div>
@@ -226,6 +228,8 @@ onNuxtReady(async () => {
</v-card> </v-card>
</v-dialog> </v-dialog>
<ModelParameters/>
<v-menu <v-menu
> >
<template v-slot:activator="{ props }"> <template v-slot:activator="{ props }">
@@ -300,6 +304,49 @@ onNuxtReady(async () => {
<v-main> <v-main>
<NuxtPage/> <NuxtPage/>
</v-main> </v-main>
<div>
<div
v-if="$pwa?.offlineReady || $pwa?.needRefresh"
class="pwa-toast"
role="alert"
>
<div class="message">
<span v-if="$pwa.offlineReady">
App ready to work offline
</span>
<span v-else>
New content available, click on reload button to update.
</span>
</div>
<button
v-if="$pwa.needRefresh"
@click="$pwa.updateServiceWorker()"
>
Reload
</button>
<button @click="$pwa.cancelPrompt()">
Close
</button>
</div>
<div
v-if="$pwa?.showInstallPrompt && !$pwa?.offlineReady && !$pwa?.needRefresh"
class="pwa-toast"
role="alert"
>
<div class="message">
<span>
Install PWA
</span>
</div>
<button @click="$pwa.install()">
Install
</button>
<button @click="$pwa.cancelInstall()">
Cancel
</button>
</div>
</div>
</v-app> </v-app>
</template> </template>
@@ -314,4 +361,27 @@ onNuxtReady(async () => {
background-color: #999; background-color: #999;
border-radius: 3px; border-radius: 3px;
} }
.pwa-toast {
position: fixed;
right: 0;
bottom: 0;
margin: 16px;
padding: 12px;
border: 1px solid #8885;
border-radius: 4px;
z-index: 1;
text-align: left;
box-shadow: 3px 4px 5px 0 #8885;
}
.pwa-toast .message {
margin-bottom: 8px;
}
.pwa-toast button {
border: 1px solid #8885;
outline: none;
margin-right: 5px;
border-radius: 2px;
padding: 3px 10px;
}
</style> </style>

View File

@@ -11,7 +11,9 @@ export default defineNuxtConfig({
}, },
runtimeConfig: { runtimeConfig: {
public: { public: {
appName: appName appName: appName,
typewriter: false,
typewriterDelay: 50,
} }
}, },
build: { build: {
@@ -23,9 +25,38 @@ export default defineNuxtConfig({
'highlight.js/styles/panda-syntax-dark.css', 'highlight.js/styles/panda-syntax-dark.css',
], ],
modules: [ modules: [
'@vite-pwa/nuxt',
'@nuxtjs/color-mode', '@nuxtjs/color-mode',
'@nuxtjs/i18n' '@nuxtjs/i18n',
], ],
pwa: {
registerType: 'autoUpdate',
manifest: {
name: appName,
short_name: appName,
icons: [
{
src: 'icon-black.svg',
sizes: '900x900',
purpose: 'any maskable',
}
],
},
workbox: {
navigateFallback: '/',
globPatterns: ['**/*.{js,css,html,png,svg,ico}'],
},
client: {
installPrompt: true,
// you don't need to include this: only for testing purposes
// if enabling periodic sync for update use 1 hour or so (periodicSyncForUpdates: 3600)
periodicSyncForUpdates: 20,
},
devOptions: {
enabled: true,
type: 'module',
}
},
i18n: { i18n: {
strategy: 'no_prefix', strategy: 'no_prefix',
locales: [ locales: [

View File

@@ -15,6 +15,7 @@
}, },
"dependencies": { "dependencies": {
"@microsoft/fetch-event-source": "^2.0.1", "@microsoft/fetch-event-source": "^2.0.1",
"@vite-pwa/nuxt": "^0.0.7",
"copy-to-clipboard": "^3.3.3", "copy-to-clipboard": "^3.3.3",
"highlight.js": "^11.7.0", "highlight.js": "^11.7.0",
"is-mobile": "^3.1.1", "is-mobile": "^3.1.1",

View File

@@ -67,7 +67,11 @@ const submit = async () => {
errorMsg.value = error.value.data.non_field_errors[0] errorMsg.value = error.value.data.non_field_errors[0]
} }
} else { } else {
errorMsg.value = 'Something went wrong. Please try again.' if (error.value.data.detail) {
errorMsg.value = error.value.data.detail
} else {
errorMsg.value = 'Something went wrong. Please try again.'
}
} }
} else { } else {
$auth.setUser(data.value.user) $auth.setUser(data.value.user)

View File

@@ -6,6 +6,7 @@ definePageMeta({
}) })
import {EventStreamContentType, fetchEventSource} from '@microsoft/fetch-event-source' import {EventStreamContentType, fetchEventSource} from '@microsoft/fetch-event-source'
import { nextTick } from 'vue' import { nextTick } from 'vue'
import MessageActions from "~/components/MessageActions.vue";
const { $i18n, $auth } = useNuxtApp() const { $i18n, $auth } = useNuxtApp()
const runtimeConfig = useRuntimeConfig() const runtimeConfig = useRuntimeConfig()
@@ -24,19 +25,22 @@ const processMessageQueue = () => {
} }
isProcessingQueue = true isProcessingQueue = true
const nextMessage = messageQueue.shift() const nextMessage = messageQueue.shift()
currentConversation.value.messages[currentConversation.value.messages.length - 1].message += nextMessage if (runtimeConfig.public.typewriter) {
isProcessingQueue = false let wordIndex = 0;
processMessageQueue() const intervalId = setInterval(() => {
// let wordIndex = 0; currentConversation.value.messages[currentConversation.value.messages.length - 1].message += nextMessage[wordIndex]
// const intervalId = setInterval(() => { wordIndex++
// currentConversation.value.messages[currentConversation.value.messages.length - 1].message += nextMessage[wordIndex] if (wordIndex === nextMessage.length) {
// wordIndex++ clearInterval(intervalId)
// if (wordIndex === nextMessage.length) { isProcessingQueue = false
// clearInterval(intervalId) processMessageQueue()
// isProcessingQueue = false }
// processMessageQueue() }, runtimeConfig.public.typewriterDelay)
// } } else {
// }, 50) currentConversation.value.messages[currentConversation.value.messages.length - 1].message += nextMessage
isProcessingQueue = false
processMessageQueue()
}
} }
let ctrl let ctrl
@@ -48,6 +52,14 @@ const abortFetch = () => {
} }
const fetchReply = async (message, parentMessageId) => { const fetchReply = async (message, parentMessageId) => {
ctrl = new AbortController() ctrl = new AbortController()
const data = Object.assign({}, currentModel.value, {
openaiApiKey: openaiApiKey.value,
message: message,
parentMessageId: parentMessageId,
conversationId: currentConversation.value.id
})
try { try {
await fetchEventSource('/api/conversation/', { await fetchEventSource('/api/conversation/', {
signal: ctrl.signal, signal: ctrl.signal,
@@ -56,13 +68,7 @@ const fetchReply = async (message, parentMessageId) => {
'accept': 'application/json', 'accept': 'application/json',
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
body: JSON.stringify({ body: JSON.stringify(data),
model: currentModel.value,
openaiApiKey: openaiApiKey.value,
message: message,
parentMessageId: parentMessageId,
conversationId: currentConversation.value.id
}),
onopen(response) { onopen(response) {
if (response.ok && response.headers.get('content-type') === EventStreamContentType) { if (response.ok && response.headers.get('content-type') === EventStreamContentType) {
return; return;
@@ -150,6 +156,10 @@ const usePrompt = (prompt) => {
editor.value.usePrompt(prompt) editor.value.usePrompt(prompt)
} }
const deleteMessage = (index) => {
currentConversation.value.messages.splice(index, 1)
}
</script> </script>
<template> <template>
@@ -164,9 +174,14 @@ const usePrompt = (prompt) => {
cols="12" cols="12"
> >
<div <div
class="d-flex" class="d-flex align-center"
:class="message.is_bot ? 'justify-start mr-16' : 'justify-end ml-16'" :class="message.is_bot ? 'justify-start' : 'justify-end'"
> >
<MessageActions
v-if="!message.is_bot"
:message="message"
:message-index="index"
/>
<v-card <v-card
:color="message.is_bot ? '' : 'primary'" :color="message.is_bot ? '' : 'primary'"
rounded="lg" rounded="lg"
@@ -175,18 +190,12 @@ const usePrompt = (prompt) => {
<v-card-text> <v-card-text>
<MsgContent :content="message.message" /> <MsgContent :content="message.message" />
</v-card-text> </v-card-text>
<!-- <v-card-actions-->
<!-- v-if="message.is_bot"-->
<!-- >-->
<!-- <v-spacer></v-spacer>-->
<!-- <v-tooltip text="Copy">-->
<!-- <template v-slot:activator="{ props }">-->
<!-- <v-btn v-bind="props" icon="content_copy"></v-btn>-->
<!-- </template>-->
<!-- </v-tooltip>-->
<!-- </v-card-actions>-->
</v-card> </v-card>
<MessageActions
v-if="message.is_bot"
:message="message"
:message-index="index"
/>
</div> </div>
</v-col> </v-col>
</v-row> </v-row>

3
public/icon-black.svg Normal file
View File

@@ -0,0 +1,3 @@
<svg width="900" height="900" viewBox="0 0 900 900" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M504.908 750H839.476C850.103 750.001 860.542 747.229 869.745 741.963C878.948 736.696 886.589 729.121 891.9 719.999C897.211 710.876 900.005 700.529 900 689.997C899.995 679.465 897.193 669.12 891.873 660.002L667.187 274.289C661.876 265.169 654.237 257.595 645.036 252.329C635.835 247.064 625.398 244.291 614.773 244.291C604.149 244.291 593.711 247.064 584.511 252.329C575.31 257.595 567.67 265.169 562.36 274.289L504.908 372.979L392.581 179.993C387.266 170.874 379.623 163.301 370.42 158.036C361.216 152.772 350.777 150 340.151 150C329.525 150 319.086 152.772 309.883 158.036C300.679 163.301 293.036 170.874 287.721 179.993L8.12649 660.002C2.80743 669.12 0.00462935 679.465 5.72978e-06 689.997C-0.00461789 700.529 2.78909 710.876 8.10015 719.999C13.4112 729.121 21.0523 736.696 30.255 741.963C39.4576 747.229 49.8973 750.001 60.524 750H270.538C353.748 750 415.112 713.775 457.336 643.101L559.849 467.145L614.757 372.979L779.547 655.834H559.849L504.908 750ZM267.114 655.737L120.551 655.704L340.249 278.586L449.87 467.145L376.474 593.175C348.433 639.03 316.577 655.737 267.114 655.737Z" fill="#0C0C0D"/>
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -1,6 +1,15 @@
export const STORAGE_KEY = { export const STORAGE_KEY = {
OPENAI_MODELS: 'openai_models', MODELS: 'models',
CURRENT_OPENAI_MODEL: 'current_openai_model', CURRENT_MODEL: 'current_model',
OPENAI_API_KEY: 'openai_api_key', OPENAI_API_KEY: 'openai_api_key',
} }
export const DEFAULT_MODEL = {
name: 'gpt-3.5-turbo',
frequency_penalty: 0.0,
presence_penalty: 0.0,
max_tokens: 1000,
temperature: 0.7,
top_p: 1.0
}

View File

@@ -11,32 +11,28 @@ const set = (key, val) => {
localStorage.setItem(key, JSON.stringify(val)) localStorage.setItem(key, JSON.stringify(val))
} }
const DEFAULT_OPENAI_MODEL = 'text-davinci-003'
export const setModels = (val) => { export const setModels = (val) => {
const models = useModels() const models = useModels()
set(STORAGE_KEY.OPENAI_MODELS, val) set(STORAGE_KEY.MODELS, val)
models.value = val models.value = val
} }
export const getStoredModels = () => { export const getStoredModels = () => {
let models = get(STORAGE_KEY.OPENAI_MODELS) let models = get(STORAGE_KEY.MODELS)
if (!models) { if (!models) {
models = [DEFAULT_OPENAI_MODEL] models = [DEFAULT_MODEL]
} }
return models return models
} }
export const setCurrentModel = (val) => { export const saveCurrentModel = (val) => {
const model = useCurrentModel() set(STORAGE_KEY.CURRENT_MODEL, val)
set(STORAGE_KEY.CURRENT_OPENAI_MODEL, val)
model.value = val
} }
export const getCurrentModel = () => { export const getCurrentModel = () => {
let model = get(STORAGE_KEY.CURRENT_OPENAI_MODEL) let model = get(STORAGE_KEY.CURRENT_MODEL)
if (!model) { if (!model) {
model = DEFAULT_OPENAI_MODEL model = DEFAULT_MODEL
} }
return model return model
} }

2008
yarn.lock

File diff suppressed because it is too large Load Diff