Compare commits


2 Commits

Author  SHA1        Message                                                                                   Date
Rafi    18a4251714  feat: Message actions                                                                     2023-03-17 18:27:07 +08:00
Rafi    878fda0054  Support configuring model parameters in the front-end and storing them in localStorage.  2023-03-17 17:01:18 +08:00
8 changed files with 335 additions and 33 deletions

View File

@@ -0,0 +1,78 @@
<script setup>
import copy from 'copy-to-clipboard'

const props = defineProps({
  message: {
    type: Object,
    required: true
  },
  messageIndex: {
    type: Number,
    required: true
  }
})

// `this` is not available in <script setup>; declare the emit explicitly.
const emit = defineEmits(['deleteMessage'])

const snackbar = ref(false)
const snackbarText = ref('')
const showSnackbar = (text) => {
  snackbarText.value = text
  snackbar.value = true
}

// Copy the raw message text to the clipboard.
const copyMessage = () => {
  copy(props.message.message)
  showSnackbar('Copied!')
}

// Delete the message on the server, then let the parent remove it locally.
const deleteMessage = async () => {
  const { error } = await useAuthFetch(`/api/chat/messages/${props.message.id}/`, {
    method: 'DELETE'
  })
  if (!error.value) {
    emit('deleteMessage', props.messageIndex)
    showSnackbar('Deleted!')
  } else {
    showSnackbar('Delete failed')
  }
}
</script>
<template>
  <v-menu>
    <template v-slot:activator="{ props }">
      <v-btn
        v-bind="props"
        icon
        variant="text"
        class="mx-1"
      >
        <v-icon icon="more_horiz"></v-icon>
      </v-btn>
    </template>
    <v-list>
      <v-list-item
        @click="copyMessage()"
      >
        <v-list-item-title>{{ $t('copy') }}</v-list-item-title>
      </v-list-item>
      <!-- <v-list-item-->
      <!--   @click="deleteMessage()"-->
      <!-- >-->
      <!--   <v-list-item-title>{{ $t('delete') }}</v-list-item-title>-->
      <!-- </v-list-item>-->
    </v-list>
  </v-menu>
  <v-snackbar
    v-model="snackbar"
    location="top"
    timeout="2000"
  >
    {{ snackbarText }}
  </v-snackbar>
</template>
<style scoped>
</style>
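
For orientation, a minimal usage sketch (not part of this diff): a parent renders the menu next to a message bubble and, if it wants the delete action to work, listens for the emitted deleteMessage event. The @delete-message binding below is an assumption; the chat page diff further down only passes the two props.

<!-- Hypothetical parent markup; `message`, `index` and `messages` come from the parent's own state -->
<MessageActions
  :message="message"
  :message-index="index"
  @delete-message="(i) => messages.splice(i, 1)"
/>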

View File

@@ -0,0 +1,191 @@
<script setup>
const dialog = ref(false)
const currentModel = useCurrentModel()

// Only the default model is offered for now.
const availableModels = [
  DEFAULT_MODEL.name
]

// Persist every parameter change to localStorage.
watch(currentModel, (newVal) => {
  saveCurrentModel(newVal)
}, { deep: true })
</script>
<template>
  <v-dialog
    v-model="dialog"
    persistent
  >
    <template v-slot:activator="{ props }">
      <v-list-item
        v-bind="props"
        rounded="xl"
        prepend-icon="tune"
        :title="$t('modelParameters')"
      ></v-list-item>
    </template>
    <v-card>
      <v-toolbar
        density="compact"
      >
        <v-toolbar-title>{{ $t('modelParameters') }}</v-toolbar-title>
        <v-spacer></v-spacer>
        <v-btn icon="close" @click="dialog = false"></v-btn>
      </v-toolbar>
      <v-card-text>
        <v-select
          v-model="currentModel.name"
          :label="$t('model')"
          :items="availableModels"
          variant="underlined"
        ></v-select>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('temperature') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.temperature"
                hide-details
                single-line
                density="compact"
                type="number"
                max="1"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.temperature"
              :max="1"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('maxTokens') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.max_tokens"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2048"
                step="1"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.max_tokens"
              :max="2048"
              :step="1"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('topP') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.top_p"
                hide-details
                single-line
                density="compact"
                type="number"
                max="1"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.top_p"
              :max="1"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('frequencyPenalty') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.frequency_penalty"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.frequency_penalty"
              :max="2"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
        <v-row no-gutters>
          <v-col cols="12">
            <div class="d-flex justify-space-between align-center">
              <v-list-subheader>{{ $t('presencePenalty') }}</v-list-subheader>
              <v-text-field
                v-model="currentModel.presence_penalty"
                hide-details
                single-line
                density="compact"
                type="number"
                max="2"
                step="0.01"
                style="width: 100px"
                class="flex-grow-0"
              ></v-text-field>
            </div>
          </v-col>
          <v-col cols="12">
            <v-slider
              v-model="currentModel.presence_penalty"
              :max="2"
              :step="0.01"
              hide-details
            ></v-slider>
          </v-col>
        </v-row>
      </v-card-text>
    </v-card>
  </v-dialog>
</template>
<style scoped>
</style>
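
Every field in the dialog is bound to currentModel, and the deep watcher persists each change via saveCurrentModel, so the full parameter object ends up serialized in localStorage under the current_model key (see the utils changes further down). A rough sketch of the stored value, using the defaults from DEFAULT_MODEL; actual numbers depend on what the user sets:

{
  "name": "gpt-3.5-turbo",
  "temperature": 0.7,
  "top_p": 1.0,
  "max_tokens": 1000,
  "frequency_penalty": 0.0,
  "presence_penalty": 0.0
}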

View File

@@ -18,10 +18,20 @@
"feedback": "Feedback", "feedback": "Feedback",
"newConversation": "New conversation", "newConversation": "New conversation",
"clearConversations": "Clear conversations", "clearConversations": "Clear conversations",
"modelParameters": "Model Parameters",
"model": "Model",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": { "roles": {
"me": "Me", "me": "Me",
"ai": "AI" "ai": "AI"
}, },
"copy": "Copy",
"copied": "Copied",
"delete": "Delete",
"welcomeScreen": { "welcomeScreen": {
"introduction1": "is an unofficial client for ChatGPT, but uses the official OpenAI API.", "introduction1": "is an unofficial client for ChatGPT, but uses the official OpenAI API.",
"introduction2": "You will need an OpenAI API Key before you can use this client.", "introduction2": "You will need an OpenAI API Key before you can use this client.",

View File

@@ -18,10 +18,20 @@
"feedback": "反馈", "feedback": "反馈",
"newConversation": "新的对话", "newConversation": "新的对话",
"clearConversations": "清除对话", "clearConversations": "清除对话",
"modelParameters": "模型参数",
"model": "模型",
"temperature": "Temperature",
"topP": "Top P",
"frequencyPenalty": "Frequency Penalty",
"presencePenalty": "Presence Penalty",
"maxTokens": "Max Tokens",
"roles": { "roles": {
"me": "我", "me": "我",
"ai": "AI" "ai": "AI"
}, },
"copy": "复制",
"copied": "已复制",
"delete": "删除",
"welcomeScreen": { "welcomeScreen": {
"introduction1": "是一个非官方的ChatGPT客户端但使用OpenAI的官方API", "introduction1": "是一个非官方的ChatGPT客户端但使用OpenAI的官方API",
"introduction2": "在使用本客户端之前您需要一个OpenAI API密钥。", "introduction2": "在使用本客户端之前您需要一个OpenAI API密钥。",

View File

@@ -228,6 +228,8 @@ onNuxtReady(async () => {
  </v-card>
  </v-dialog>
+ <ModelParameters/>
  <v-menu
  >
  <template v-slot:activator="{ props }">

View File

@@ -6,6 +6,7 @@ definePageMeta({
  })
  import {EventStreamContentType, fetchEventSource} from '@microsoft/fetch-event-source'
  import { nextTick } from 'vue'
+ import MessageActions from "~/components/MessageActions.vue";
  const { $i18n, $auth } = useNuxtApp()
  const runtimeConfig = useRuntimeConfig()
@@ -51,6 +52,14 @@ const abortFetch = () => {
  }
  const fetchReply = async (message, parentMessageId) => {
    ctrl = new AbortController()
+   const data = Object.assign({}, currentModel.value, {
+     openaiApiKey: openaiApiKey.value,
+     message: message,
+     parentMessageId: parentMessageId,
+     conversationId: currentConversation.value.id
+   })
    try {
      await fetchEventSource('/api/conversation/', {
        signal: ctrl.signal,
@@ -59,13 +68,7 @@ const fetchReply = async (message, parentMessageId) => {
          'accept': 'application/json',
          'Content-Type': 'application/json',
        },
-       body: JSON.stringify({
-         model: currentModel.value,
-         openaiApiKey: openaiApiKey.value,
-         message: message,
-         parentMessageId: parentMessageId,
-         conversationId: currentConversation.value.id
-       }),
+       body: JSON.stringify(data),
        onopen(response) {
          if (response.ok && response.headers.get('content-type') === EventStreamContentType) {
            return;
@@ -153,6 +156,10 @@ const usePrompt = (prompt) => {
    editor.value.usePrompt(prompt)
  }
+ const deleteMessage = (index) => {
+   currentConversation.value.messages.splice(index, 1)
+ }
  </script>
  <template>
@@ -167,9 +174,14 @@ const usePrompt = (prompt) => {
cols="12" cols="12"
> >
<div <div
class="d-flex" class="d-flex align-center"
:class="message.is_bot ? 'justify-start mr-16' : 'justify-end ml-16'" :class="message.is_bot ? 'justify-start' : 'justify-end'"
> >
<MessageActions
v-if="!message.is_bot"
:message="message"
:message-index="index"
/>
<v-card <v-card
:color="message.is_bot ? '' : 'primary'" :color="message.is_bot ? '' : 'primary'"
rounded="lg" rounded="lg"
@@ -178,18 +190,12 @@ const usePrompt = (prompt) => {
          <v-card-text>
            <MsgContent :content="message.message" />
          </v-card-text>
-         <!-- <v-card-actions-->
-         <!--   v-if="message.is_bot"-->
-         <!-- >-->
-         <!--   <v-spacer></v-spacer>-->
-         <!--   <v-tooltip text="Copy">-->
-         <!--     <template v-slot:activator="{ props }">-->
-         <!--       <v-btn v-bind="props" icon="content_copy"></v-btn>-->
-         <!--     </template>-->
-         <!--   </v-tooltip>-->
-         <!-- </v-card-actions>-->
        </v-card>
+       <MessageActions
+         v-if="message.is_bot"
+         :message="message"
+         :message-index="index"
+       />
      </div>
    </v-col>
  </v-row>
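
The Object.assign change above flattens the model parameters into the top level of the request body instead of nesting them under a model key, so /api/conversation/ now receives a payload roughly like this (field names taken from the diff, values illustrative):

{
  "name": "gpt-3.5-turbo",
  "temperature": 0.7,
  "top_p": 1.0,
  "max_tokens": 1000,
  "frequency_penalty": 0.0,
  "presence_penalty": 0.0,
  "openaiApiKey": "<key from localStorage>",
  "message": "Hello",
  "parentMessageId": null,
  "conversationId": 1
}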

View File

@@ -1,6 +1,15 @@
  export const STORAGE_KEY = {
-   OPENAI_MODELS: 'openai_models',
-   CURRENT_OPENAI_MODEL: 'current_openai_model',
+   MODELS: 'models',
+   CURRENT_MODEL: 'current_model',
    OPENAI_API_KEY: 'openai_api_key',
+ }
+
+ export const DEFAULT_MODEL = {
+   name: 'gpt-3.5-turbo',
+   frequency_penalty: 0.0,
+   presence_penalty: 0.0,
+   max_tokens: 1000,
+   temperature: 0.7,
+   top_p: 1.0
  }

View File

@@ -11,32 +11,28 @@ const set = (key, val) => {
    localStorage.setItem(key, JSON.stringify(val))
  }
- const DEFAULT_OPENAI_MODEL = 'text-davinci-003'
  export const setModels = (val) => {
    const models = useModels()
-   set(STORAGE_KEY.OPENAI_MODELS, val)
+   set(STORAGE_KEY.MODELS, val)
    models.value = val
  }
  export const getStoredModels = () => {
-   let models = get(STORAGE_KEY.OPENAI_MODELS)
+   let models = get(STORAGE_KEY.MODELS)
    if (!models) {
-     models = [DEFAULT_OPENAI_MODEL]
+     models = [DEFAULT_MODEL]
    }
    return models
  }
- export const setCurrentModel = (val) => {
-   const model = useCurrentModel()
-   set(STORAGE_KEY.CURRENT_OPENAI_MODEL, val)
-   model.value = val
+ export const saveCurrentModel = (val) => {
+   set(STORAGE_KEY.CURRENT_MODEL, val)
  }
  export const getCurrentModel = () => {
-   let model = get(STORAGE_KEY.CURRENT_OPENAI_MODEL)
+   let model = get(STORAGE_KEY.CURRENT_MODEL)
    if (!model) {
-     model = DEFAULT_OPENAI_MODEL
+     model = DEFAULT_MODEL
    }
    return model
  }
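
saveCurrentModel now only writes to localStorage and no longer updates the reactive state itself, which implies the reactive value is owned by a useCurrentModel composable that is not part of this compare view. A plausible sketch, assuming Nuxt's useState plus the getCurrentModel helper above; the real implementation may differ:

// Hypothetical composable; not shown in this diff.
export const useCurrentModel = () => useState('currentModel', () => {
  // localStorage is only available on the client; fall back to defaults during SSR.
  return process.client ? getCurrentModel() : DEFAULT_MODEL
})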