Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 8175f199d2 | |
| | f8c2f396c1 | |
| | 8217647df8 | |
```diff
@@ -1,11 +1,15 @@
 <script setup>
 
 const dialog = ref(false)
 const currentModel = useCurrentModel()
 const availableModels = [
-    DEFAULT_MODEL.name
+    'gpt-3.5-turbo',
+    'gpt-4'
 ]
+const currentModelDefault = ref(MODELS[currentModel.value.name])
 
 watch(currentModel, (newVal, oldVal) => {
+    currentModelDefault.value = MODELS[newVal.name]
     saveCurrentModel(newVal)
 }, { deep: true })
 
@@ -83,7 +87,7 @@ watch(currentModel, (newVal, oldVal) => {
             single-line
             density="compact"
             type="number"
-            max="2048"
+            :max="currentModelDefault.total_tokens"
             step="1"
             style="width: 100px"
             class="flex-grow-0"
@@ -93,7 +97,7 @@ watch(currentModel, (newVal, oldVal) => {
         <v-col cols="12">
           <v-slider
             v-model="currentModel.max_tokens"
-            :max="2048"
+            :max="currentModelDefault.total_tokens"
             :step="1"
             hide-details
           >
```
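Taken together, the hunks above replace the hard-coded 2048-token ceiling with a per-model one: `currentModelDefault` is looked up from the new `MODELS` map (added further down in `~/utils/enums`), and both the number field and the slider bind `:max` to its `total_tokens`. A minimal sketch of that pattern outside the component, with the `MODELS` entries inlined purely for illustration:

```js
// Minimal sketch, not the actual component: shows how the token ceiling follows
// the selected model. MODELS is inlined here; in the app it comes from ~/utils/enums.
import { ref, watch } from 'vue'

const MODELS = {
  'gpt-3.5-turbo': { name: 'gpt-3.5-turbo', total_tokens: 4096, max_tokens: 1000 },
  'gpt-4': { name: 'gpt-4', total_tokens: 8192, max_tokens: 2000 },
}

const currentModel = ref({ ...MODELS['gpt-3.5-turbo'] })
const currentModelDefault = ref(MODELS[currentModel.value.name])

// Whatever the template binds to :max (the number field and the slider) now reads
// currentModelDefault.total_tokens instead of a hard-coded 2048.
watch(currentModel, (newVal) => {
  currentModelDefault.value = MODELS[newVal.name]
  console.log(`max_tokens ceiling is now ${currentModelDefault.value.total_tokens}`)
}, { deep: true })

// Switching to gpt-4 raises the ceiling from 4096 to 8192 once the watcher runs.
currentModel.value = { ...MODELS['gpt-4'] }
```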
```diff
@@ -1,5 +1,5 @@
 
-export const useModels = () => useState('models', () => getStoredModels())
+// export const useModels = () => useState('models', () => getStoredModels())
 
 export const useCurrentModel = () => useState('currentModel', () => getCurrentModel())
 
```
```diff
@@ -1,6 +1,7 @@
 version: '3'
 services:
   client:
+    platform: linux/x86_64
     image: wongsaang/chatgpt-ui-client:latest
     environment:
       - SERVER_DOMAIN=http://backend-web-server
@@ -15,6 +16,7 @@ services:
       - chatgpt_ui_network
     restart: always
   backend-wsgi-server:
+    platform: linux/x86_64
     image: wongsaang/chatgpt-ui-wsgi-server:latest
     environment:
       - APP_DOMAIN=${APP_DOMAIN:-localhost:9000}
@@ -36,6 +38,7 @@ services:
       - chatgpt_ui_network
     restart: always
   backend-web-server:
+    platform: linux/x86_64
     image: wongsaang/chatgpt-ui-web-server:latest
     environment:
       - BACKEND_URL=http://backend-wsgi-server:8000
```
```diff
@@ -90,12 +90,13 @@ const fetchReply = async (message) => {
         throw err;
       },
       async onmessage(message) {
         // console.log(message)
         const event = message.event
         const data = JSON.parse(message.data)
 
         if (event === 'error') {
-          throw new Error(data.error);
+          abortFetch()
+          showSnackbar(data.error)
+          return;
         }
 
         if (event === 'userMessageId') {
```
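For context on the hunk above: the previous code threw from inside `onmessage` when the backend streamed an `error` event, which surfaces as a stream failure in the enclosing SSE helper (the `onopen`/`onmessage`/`onerror` callback shape suggests something like `@microsoft/fetch-event-source`, though the library isn't visible in this diff). The change handles the error in place instead. A hedged sketch of the resulting handler, with `abortFetch` and `showSnackbar` standing in for the component's own helpers:

```js
// Sketch only: error events are now handled gracefully inside the message handler.
// abortFetch and showSnackbar are assumed to exist in the surrounding component.
const handleStreamMessage = (message, { abortFetch, showSnackbar }) => {
  const event = message.event
  const data = JSON.parse(message.data)

  if (event === 'error') {
    // Cancel the in-flight request and surface the message to the user,
    // rather than throwing and letting the stream machinery deal with it.
    abortFetch()
    showSnackbar(data.error)
    return
  }

  // ...'userMessageId' and regular content events continue to be handled below...
}
```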
```diff
@@ -5,11 +5,25 @@ export const STORAGE_KEY = {
     OPENAI_API_KEY: 'openai_api_key',
 }
 
-export const DEFAULT_MODEL = {
-    name: 'gpt-3.5-turbo',
-    frequency_penalty: 0.0,
-    presence_penalty: 0.0,
-    max_tokens: 1000,
-    temperature: 0.7,
-    top_p: 1.0
-}
+export const MODELS = {
+    'gpt-3.5-turbo': {
+        name: 'gpt-3.5-turbo',
+        frequency_penalty: 0.0,
+        presence_penalty: 0.0,
+        total_tokens: 4096,
+        max_tokens: 1000,
+        temperature: 0.7,
+        top_p: 1.0
+    },
+    'gpt-4': {
+        name: 'gpt-4',
+        frequency_penalty: 0.0,
+        presence_penalty: 0.0,
+        total_tokens: 8192,
+        max_tokens: 2000,
+        temperature: 0.7,
+        top_p: 1.0
+    }
+}
+
+export const DEFAULT_MODEL_NAME = 'gpt-3.5-turbo'
```
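Here the single `DEFAULT_MODEL` constant becomes a `MODELS` map keyed by model name, with a new per-model `total_tokens` field (4096 and 8192, which match the models' context window sizes) and a `DEFAULT_MODEL_NAME` for fallbacks. A small illustrative consumer, assuming `~/utils/enums` is the file shown above; `clampMaxTokens` is a hypothetical helper, not part of the change:

```js
// Illustrative only: resolving per-model defaults from the new MODELS map.
import { MODELS, DEFAULT_MODEL_NAME } from '~/utils/enums'

const resolveDefaults = (name) => MODELS[name] ?? MODELS[DEFAULT_MODEL_NAME]

// Keep a user-entered max_tokens within the selected model's total_tokens budget.
const clampMaxTokens = (model) =>
  Math.min(model.max_tokens, resolveDefaults(model.name).total_tokens)

console.log(clampMaxTokens({ name: 'gpt-4', max_tokens: 10000 }))   // 8192
console.log(clampMaxTokens({ name: 'unknown', max_tokens: 5000 }))  // 4096
```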
```diff
@@ -1,3 +1,4 @@
+import {MODELS} from "~/utils/enums";
 
 const get = (key) => {
     let val = localStorage.getItem(key)
@@ -17,13 +18,13 @@ export const setModels = (val) => {
     models.value = val
 }
 
-export const getStoredModels = () => {
-    let models = get(STORAGE_KEY.MODELS)
-    if (!models) {
-        models = [DEFAULT_MODEL]
-    }
-    return models
-}
+// export const getStoredModels = () => {
+//     let models = get(STORAGE_KEY.MODELS)
+//     if (!models) {
+//         models = [DEFAULT_MODEL]
+//     }
+//     return models
+// }
 
 export const saveCurrentModel = (val) => {
     set(STORAGE_KEY.CURRENT_MODEL, val)
@@ -32,7 +33,7 @@ export const saveCurrentModel = (val) => {
 export const getCurrentModel = () => {
     let model = get(STORAGE_KEY.CURRENT_MODEL)
     if (!model) {
-        model = DEFAULT_MODEL
+        model = MODELS[DEFAULT_MODEL_NAME]
     }
     return model
 }
```
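One note on the final hunk: the fallback changes from the removed `DEFAULT_MODEL` constant to `MODELS[DEFAULT_MODEL_NAME]`, so a first-time visitor with nothing in localStorage now starts from the new map. A rough sketch of the persistence round trip under stated assumptions: `get`/`set` are assumed to wrap localStorage with JSON (only `get`'s first line is visible in the diff), and the `'current_model'` key name is assumed.

```js
// Rough sketch of the storage helpers after this change; the key value and the
// JSON wrapping are assumptions, only the overall shape mirrors the diff.
import { MODELS, DEFAULT_MODEL_NAME } from '~/utils/enums'

const STORAGE_KEY = { CURRENT_MODEL: 'current_model' }  // assumed key value

const get = (key) => {
  const val = localStorage.getItem(key)
  return val ? JSON.parse(val) : null
}
const set = (key, val) => localStorage.setItem(key, JSON.stringify(val))

export const saveCurrentModel = (val) => set(STORAGE_KEY.CURRENT_MODEL, val)

// Falls back to the gpt-3.5-turbo defaults when nothing has been stored yet.
export const getCurrentModel = () =>
  get(STORAGE_KEY.CURRENT_MODEL) ?? MODELS[DEFAULT_MODEL_NAME]
```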