Change the model cache to local storage and support caching multiple models

Rafi
2023-02-12 12:40:47 +08:00
parent 2f2ed46911
commit edde4112c1
7 changed files with 179 additions and 55 deletions

utils/enums.js (new file)

@@ -0,0 +1,5 @@
export const STORAGE_KEY = {
  OPENAI_MODELS: 'openai_models',
  CURRENT_OPENAI_MODEL: 'current_openai_model',
}
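
The new enum centralizes the localStorage keys used by the cache. The other changed files (not shown here) presumably read and write these keys; as a rough sketch of how they could back a cache that holds the fetched model list plus the currently selected model — the helper names below (cacheModels, getCachedModels, setCurrentModel, getCurrentModel) are illustrative, not taken from this commit:

import { STORAGE_KEY } from './enums.js'

// Cache the full list of models fetched from the OpenAI API.
export function cacheModels(models) {
  localStorage.setItem(STORAGE_KEY.OPENAI_MODELS, JSON.stringify(models))
}

// Return the cached model list, or an empty array when nothing is cached yet.
export function getCachedModels() {
  const raw = localStorage.getItem(STORAGE_KEY.OPENAI_MODELS)
  return raw ? JSON.parse(raw) : []
}

// Remember which of the cached models is currently selected.
export function setCurrentModel(modelId) {
  localStorage.setItem(STORAGE_KEY.CURRENT_OPENAI_MODEL, modelId)
}

// Return the currently selected model id, or null if none has been set.
export function getCurrentModel() {
  return localStorage.getItem(STORAGE_KEY.CURRENT_OPENAI_MODEL)
}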