
Commit 2d23cfb

Merge pull request transformerlab#397 from transformerlab/fix/token-count-interact
Fix Token Count in Interact tab
2 parents f1fc0cb + a091b99 commit 2d23cfb

1 file changed: +9 −6 lines changed


src/renderer/components/Experiment/Interact/Interact.tsx

Lines changed: 9 additions & 6 deletions
@@ -95,7 +95,10 @@ export default function Chat({
     // For now this is helpful a rough indicator of the number of tokens used.
     // But we should improve this later
     if (mode === 'chat' || mode === 'tools') {
-      textToDebounce += experimentInfo?.config?.prompt_template?.system_message;
+      // textToDebounce += experimentInfo?.config?.prompt_template?.system_message;
+      const systemMessage =
+        document.getElementsByName('system-message')[0]?.value;
+      textToDebounce += systemMessage || '';
       textToDebounce += '\n';
       chats.forEach((c) => {
         textToDebounce += c.t;
@@ -122,7 +125,7 @@ export default function Chat({
       }
     }
     scrollChatToBottom();
-  }, []);
+  }, [debouncedText, chats, mode]);

   // If the model changes, check the location of the inference service
   // And reset the global pointer to the inference server
@@ -704,7 +707,7 @@ export default function Chat({
   };

   async function countTokens() {
-    var count = await chatAPI.countTokens(currentModel, [debouncedText]);
+    let count = await chatAPI.countTokens(currentModel, [debouncedText]);
     setTokenCount(count);
   }

@@ -719,10 +722,10 @@ export default function Chat({
       };
     });

-    texts.push({ role: 'user', content: debouncedText });
-
-    var count = await chatAPI.countChatTokens(currentModel, texts);
+    // Only add debouncedText if it exists, otherwise use empty string
+    texts.push({ role: 'user', content: debouncedText || '' });

+    let count = await chatAPI.countChatTokens(currentModel, texts);
     setTokenCount(count);
   }

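For context, the fix does two related things: the system message that feeds the token count is now read from the 'system-message' input in the DOM (with an empty-string fallback so an unset field no longer contributes the literal string "undefined" to the counted text), and the effect that assembles the debounced text now declares debouncedText, chats, and mode as dependencies instead of running only once on mount. Below is a minimal sketch of that pattern; the useTokenCount hook name, the declared chatAPI shape, and the merged build-and-count logic are illustrative assumptions, not the actual structure of Interact.tsx.

import { useEffect, useState } from 'react';

// Hypothetical declaration for illustration; the real chatAPI client lives
// elsewhere in the Transformer Lab renderer code.
declare const chatAPI: {
  countTokens: (model: string, texts: string[]) => Promise<number>;
};

export function useTokenCount(
  currentModel: string,
  debouncedText: string,
  chats: { t: string }[],
  mode: string,
): number {
  const [tokenCount, setTokenCount] = useState(0);

  useEffect(() => {
    let cancelled = false;

    async function countTokens() {
      // Read the system message from the named DOM input; fall back to ''
      // so an empty field does not append "undefined" to the counted text.
      const systemMessage =
        (document.getElementsByName('system-message')[0] as HTMLInputElement)
          ?.value || '';

      let text = systemMessage + '\n';
      chats.forEach((c) => {
        text += c.t;
      });
      text += debouncedText || '';

      const count = await chatAPI.countTokens(currentModel, [text]);
      if (!cancelled) setTokenCount(count);
    }

    if (mode === 'chat' || mode === 'tools') countTokens();
    return () => {
      cancelled = true;
    };
    // The commit's key change: depend on the inputs so the count tracks what
    // the user types, instead of running once with an empty dependency array.
  }, [debouncedText, chats, mode, currentModel]);

  return tokenCount;
}

In the real component the debounced text is assembled in one effect and counted separately in countTokens/countChatTokens; the sketch collapses them only to show why the dependency array and the || '' fallbacks matter for keeping the displayed token count in sync.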