diff --git a/components/MsgContent.vue b/components/MsgContent.vue
index b3f0cc6..ec0036e 100644
--- a/components/MsgContent.vue
+++ b/components/MsgContent.vue
@@ -13,7 +13,7 @@ hljs.addPlugin({
     }
     header = Object.assign(document.createElement("div"), {
-      className: "hljs-code-header d-flex align-center justify-space-between bg-black pa-1",
+      className: "hljs-code-header d-flex align-center justify-space-between bg-grey-darken-3 pa-1",
       innerHTML: `
         ${result.language}
       `
     });
@@ -56,16 +56,22 @@ const contentHtml = ref('')
 const contentElm = ref(null)
 const highlightCode = () => {
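+  // Nothing to highlight until the content element has been mounted.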
+  if (!contentElm.value) {
+    return
+  }
   contentElm.value.querySelectorAll('pre code').forEach((block) => {
     hljs.highlightElement(block)
   })
 }
 watchEffect(() => {
+  console.log('content changed', props.content)
   contentHtml.value = props.content ? marked(props.content) : ''
-  nextTick(() => {
-    highlightCode()
-  })
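+  // Re-run highlighting only when the streamed markdown currently ends with
+  // a ``` fence, so partially streamed code blocks are not highlighted.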
+  if (props.content && props.content.endsWith('```')) {
+    nextTick(() => {
+      highlightCode()
+    })
+  }
 })
diff --git a/pages/index.vue b/pages/index.vue
index 6fb9e33..baf1867 100644
--- a/pages/index.vue
+++ b/pages/index.vue
@@ -10,6 +10,29 @@ const runtimeConfig = useRuntimeConfig()
 const currentModel = useCurrentModel()
 const openaiApiKey = useApiKey()
 const fetchingResponse = ref(false)
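+// Streamed response chunks waiting to be "typed" into the last bot message.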
+const messageQueue = []
+let isProcessingQueue = false
+
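+// Take the next chunk off the queue and append it to the latest bot message
+// one character at a time, giving the reply a typewriter effect.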
+const processMessageQueue = () => {
+  if (isProcessingQueue || messageQueue.length === 0) {
+    return
+  }
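+  // Make sure the conversation ends with a bot message to append to;
+  // otherwise push an empty placeholder first.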
+  if (!currentConversation.value.messages[currentConversation.value.messages.length - 1].is_bot) {
+    currentConversation.value.messages.push({id: null, is_bot: true, message: ''})
+  }
+  isProcessingQueue = true
+  const nextMessage = messageQueue.shift()
+  let wordIndex = 0
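+  // Append one character every 50 ms; when the chunk is exhausted, stop the
+  // timer, release the lock and process the next queued chunk.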
+  const intervalId = setInterval(() => {
+    currentConversation.value.messages[currentConversation.value.messages.length - 1].message += nextMessage[wordIndex]
+    wordIndex++
+    if (wordIndex === nextMessage.length) {
+      clearInterval(intervalId)
+      isProcessingQueue = false
+      processMessageQueue()
+    }
+  }, 50)
+}
 let ctrl
 const abortFetch = () => {
@@ -69,11 +92,8 @@ const fetchReply = async (message, parentMessageId) => {
         return;
       }
-      if (currentConversation.value.messages[currentConversation.value.messages.length - 1].is_bot) {
-        currentConversation.value.messages[currentConversation.value.messages.length - 1].message += data.content
-      } else {
-        currentConversation.value.messages.push({id: null, is_bot: true, message: data.content})
-      }
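+      // Buffer the streamed chunk and let the typewriter queue render it,
+      // rather than appending it to the last message immediately.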
+      messageQueue.push(data.content)
+      processMessageQueue()
       scrollChatWindow()
     },