Change the model cache to use local storage and support caching multiple models

This commit is contained in:
Rafi
2023-02-12 12:40:47 +08:00
parent 2f2ed46911
commit edde4112c1
7 changed files with 179 additions and 55 deletions

View File

@@ -38,7 +38,6 @@ export default defineEventHandler(async (event) => {
'Connection': 'keep-alive'
})
const modelName = await getSetting('modelName')
const apiKey = await getSetting('apiKey')
if (!apiKey) {
@@ -57,7 +56,7 @@ export default defineEventHandler(async (event) => {
modelOptions: {
// The model is set to text-chat-davinci-002-20221122 by default, but you can override
// it and any other parameters here
model: modelName,
model: body.model,
},
// (Optional) Set custom instructions instead of "You are ChatGPT...".
// promptPrefix: 'You are Bob, a cowboy in Western times...',