import { DiffMatchPatch, DOMPurify, localforage } from '../lib.js';
import { chat, event_types, eventSource, getCurrentChatId, reloadCurrentChat } from '../script.js';
import { t } from './i18n.js';
import { oai_settings } from './openai.js';
import { Popup, POPUP_TYPE } from './popup.js';
import { power_user, registerDebugFunction } from './power-user.js';
import { isMobile } from './RossAscends-mods.js';
import { renderTemplateAsync } from './templates.js';
import { getFriendlyTokenizerName, getTokenCountAsync } from './tokenizers.js';
import { copyText } from './utils.js';

// Index into itemizedPrompts of the prompt currently shown in the raw prompt display.
let PromptArrayItemForRawPromptDisplay;
// Index of the most recent earlier prompt that has a raw prompt, used for the "diff previous" view.
let priorPromptArrayItemForRawPromptDisplay;

const promptStorage = localforage.createInstance({ name: 'SillyTavern_Prompts' });

export let itemizedPrompts = [];
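// Rough sketch (an assumption, not an enforced schema) of the entries other parts of the app
// push into itemizedPrompts, based on the fields read by itemizedParams() and promptItemize() below:
//
//   {
//       mesId: 3,                          // message index the prompt was generated for
//       rawPrompt: '...' or [{ content }], // flattened via map(x => x.content) when it is an array
//       main_api: 'openai',                // API used for the generation
//       presetName: 'Default',
//       padding: 0,
//       // ...plus the per-section strings and token counts consumed by itemizedParams()
//   }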
/**
 * Gets the itemized prompts for a chat.
 * @param {string} chatId Chat ID to load
 */
export async function loadItemizedPrompts(chatId) {
    try {
        if (!chatId) {
            itemizedPrompts = [];
            return;
        }

        itemizedPrompts = await promptStorage.getItem(chatId);

        if (!itemizedPrompts) {
            itemizedPrompts = [];
        }
    } catch {
        console.log('Error loading itemized prompts for chat', chatId);
        itemizedPrompts = [];
    }
}
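// Illustrative usage sketch (not wired up here): a caller could reload the per-chat cache
// whenever the active chat changes, assuming a CHAT_CHANGED event exists in event_types.
//
//   eventSource.on(event_types.CHAT_CHANGED, async (chatId) => {
//       await loadItemizedPrompts(chatId);
//   });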
/**
 * Saves the itemized prompts for a chat.
 * @param {string} chatId Chat ID to save itemized prompts for
 */
export async function saveItemizedPrompts(chatId) {
    try {
        if (!chatId) {
            return;
        }

        await promptStorage.setItem(chatId, itemizedPrompts);
    } catch {
        console.log('Error saving itemized prompts for chat', chatId);
    }
}
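// Illustrative usage sketch: after a generation records a new entry, persist the cache for the
// current chat. `newEntry` is a hypothetical object shaped like the sketch near the top of this file.
//
//   itemizedPrompts.push(newEntry);
//   await saveItemizedPrompts(getCurrentChatId());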
/**
 * Replaces the stored raw prompt text for a message.
 * @param {number} mesId Message ID to replace the raw prompt text for
 * @param {string} promptText New raw prompt text
 * @returns {Promise<void>}
 */
export async function replaceItemizedPromptText(mesId, promptText) {
    if (!Array.isArray(itemizedPrompts)) {
        itemizedPrompts = [];
    }

    const itemizedPrompt = itemizedPrompts.find(x => x.mesId === mesId);

    if (!itemizedPrompt) {
        return;
    }

    itemizedPrompt.rawPrompt = promptText;
}
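// Illustrative usage sketch: swap in the prompt that was actually sent for message 5, then persist
// the change. `newRawPrompt` is a hypothetical value; the message id is only an example.
//
//   await replaceItemizedPromptText(5, newRawPrompt);
//   await saveItemizedPrompts(getCurrentChatId());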
/**
 * Deletes the itemized prompts for a chat.
 * @param {string} chatId Chat ID to delete itemized prompts for
 */
export async function deleteItemizedPrompts(chatId) {
    try {
        if (!chatId) {
            return;
        }

        await promptStorage.removeItem(chatId);
    } catch {
        console.log('Error deleting itemized prompts for chat', chatId);
    }
}
/**
 * Empties the itemized prompts array and caches.
 */
export async function clearItemizedPrompts() {
    try {
        await promptStorage.clear();
        itemizedPrompts = [];
    } catch {
        console.log('Error clearing itemized prompts');
    }
}
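// Note: clearItemizedPrompts() is exposed through the debug function registered in
// initItemizedPrompts() below, and deleteItemizedPrompts() is driven there by the
// CHAT_DELETED / GROUP_CHAT_DELETED events.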
export async function itemizedParams(itemizedPrompts, thisPromptSet, incomingMesId) {
    const params = {
        charDescriptionTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].charDescription),
        charPersonalityTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].charPersonality),
        scenarioTextTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].scenarioText),
        userPersonaStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].userPersona),
        worldInfoStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].worldInfoString),
        allAnchorsTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].allAnchors),
        summarizeStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].summarizeString),
        authorsNoteStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].authorsNoteString),
        smartContextStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].smartContextString),
        beforeScenarioAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].beforeScenarioAnchor),
        afterScenarioAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].afterScenarioAnchor),
        zeroDepthAnchorTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].zeroDepthAnchor), // TODO: unused
        thisPrompt_padding: itemizedPrompts[thisPromptSet].padding,
        this_main_api: itemizedPrompts[thisPromptSet].main_api,
        chatInjects: await getTokenCountAsync(itemizedPrompts[thisPromptSet].chatInjects),
        chatVectorsStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].chatVectorsString),
        dataBankVectorsStringTokens: await getTokenCountAsync(itemizedPrompts[thisPromptSet].dataBankVectorsString),
        modelUsed: chat[incomingMesId]?.extra?.model,
        apiUsed: chat[incomingMesId]?.extra?.api,
        presetName: itemizedPrompts[thisPromptSet].presetName || t`(Unknown)`,
        messagesCount: String(itemizedPrompts[thisPromptSet].messagesCount ?? ''),
        examplesCount: String(itemizedPrompts[thisPromptSet].examplesCount ?? ''),
    };

    const getFriendlyName = (value) => $(`#rm_api_block select option[value="${value}"]`).first().text() || value;

    if (params.apiUsed) {
        params.apiUsed = getFriendlyName(params.apiUsed);
    }

    if (params.this_main_api) {
        params.mainApiFriendlyName = getFriendlyName(params.this_main_api);
    }

    // NOTE: both branches below reassign params.ActualChatHistoryTokens unconditionally,
    // so this adjustment is currently overwritten before the value is displayed.
    if (params.chatInjects) {
        params.ActualChatHistoryTokens = params.ActualChatHistoryTokens - params.chatInjects;
    }

    if (params.this_main_api === 'openai') {
        //for OAI API
        //console.log('-- Counting OAI Tokens');

        //params.finalPromptTokens = itemizedPrompts[thisPromptSet].oaiTotalTokens;
        params.oaiMainTokens = itemizedPrompts[thisPromptSet].oaiMainTokens;
        params.oaiStartTokens = itemizedPrompts[thisPromptSet].oaiStartTokens;
        params.ActualChatHistoryTokens = itemizedPrompts[thisPromptSet].oaiConversationTokens;
        params.examplesStringTokens = itemizedPrompts[thisPromptSet].oaiExamplesTokens;
        params.oaiPromptTokens = itemizedPrompts[thisPromptSet].oaiPromptTokens - (params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens) + params.examplesStringTokens;
        params.oaiBiasTokens = itemizedPrompts[thisPromptSet].oaiBiasTokens;
        params.oaiJailbreakTokens = itemizedPrompts[thisPromptSet].oaiJailbreakTokens;
        params.oaiNudgeTokens = itemizedPrompts[thisPromptSet].oaiNudgeTokens;
        params.oaiImpersonateTokens = itemizedPrompts[thisPromptSet].oaiImpersonateTokens;
        params.oaiNsfwTokens = itemizedPrompts[thisPromptSet].oaiNsfwTokens;
        params.finalPromptTokens =
            params.oaiStartTokens +
            params.oaiPromptTokens +
            params.oaiMainTokens +
            params.oaiNsfwTokens +
            params.oaiBiasTokens +
            params.oaiImpersonateTokens +
            params.oaiJailbreakTokens +
            params.oaiNudgeTokens +
            params.ActualChatHistoryTokens +
            //charDescriptionTokens +
            //charPersonalityTokens +
            //allAnchorsTokens +
            params.worldInfoStringTokens +
            params.beforeScenarioAnchorTokens +
            params.afterScenarioAnchorTokens;
        // Max context size - max completion tokens
        params.thisPrompt_max_context = (oai_settings.openai_max_context - oai_settings.openai_max_tokens);

        //console.log('-- applying % on OAI tokens');
        params.oaiStartTokensPercentage = ((params.oaiStartTokens / (params.finalPromptTokens)) * 100).toFixed(2);
        params.storyStringTokensPercentage = (((params.afterScenarioAnchorTokens + params.beforeScenarioAnchorTokens + params.oaiPromptTokens) / (params.finalPromptTokens)) * 100).toFixed(2);
        params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.finalPromptTokens)) * 100).toFixed(2);
        params.promptBiasTokensPercentage = ((params.oaiBiasTokens / (params.finalPromptTokens)) * 100).toFixed(2);
        params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.finalPromptTokens)) * 100).toFixed(2);
        params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.finalPromptTokens)) * 100).toFixed(2);
        params.selectedTokenizer = getFriendlyTokenizerName(params.this_main_api).tokenizerName;
        params.oaiSystemTokens = params.oaiImpersonateTokens + params.oaiJailbreakTokens + params.oaiNudgeTokens + params.oaiStartTokens + params.oaiNsfwTokens + params.oaiMainTokens;
        params.oaiSystemTokensPercentage = ((params.oaiSystemTokens / (params.finalPromptTokens)) * 100).toFixed(2);
    } else {
        //for non-OAI APIs
        //console.log('-- Counting non-OAI Tokens');
        params.finalPromptTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].finalPrompt);
        params.storyStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].storyString) - params.worldInfoStringTokens;
        params.examplesStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].examplesString);
        params.mesSendStringTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].mesSendString);
        params.ActualChatHistoryTokens = params.mesSendStringTokens - (params.allAnchorsTokens - (params.beforeScenarioAnchorTokens + params.afterScenarioAnchorTokens)) + power_user.token_padding;
        params.instructionTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].instruction);
        params.promptBiasTokens = await getTokenCountAsync(itemizedPrompts[thisPromptSet].promptBias);

        params.totalTokensInPrompt =
            params.storyStringTokens + //chardefs total
            params.worldInfoStringTokens +
            params.examplesStringTokens + // example messages
            params.ActualChatHistoryTokens + //chat history
            params.allAnchorsTokens + // AN and/or legacy anchors
            //afterScenarioAnchorTokens + //only counts if AN is set to 'after scenario'
            //zeroDepthAnchorTokens + //same as above, even if AN not on 0 depth
            params.promptBiasTokens; //{{}}
        //- thisPrompt_padding; //not sure this way of calculating is correct, but the math results in same value as 'finalPrompt'
        params.thisPrompt_max_context = itemizedPrompts[thisPromptSet].this_max_context;
        params.thisPrompt_actual = params.thisPrompt_max_context - params.thisPrompt_padding;

        //console.log('-- applying % on non-OAI tokens');
        params.storyStringTokensPercentage = ((params.storyStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
        params.ActualChatHistoryTokensPercentage = ((params.ActualChatHistoryTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
        params.promptBiasTokensPercentage = ((params.promptBiasTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
        params.worldInfoStringTokensPercentage = ((params.worldInfoStringTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
        params.allAnchorsTokensPercentage = ((params.allAnchorsTokens / (params.totalTokensInPrompt)) * 100).toFixed(2);
        params.selectedTokenizer = itemizedPrompts[thisPromptSet]?.tokenizer || getFriendlyTokenizerName(params.this_main_api).tokenizerName;
    }
    return params;
}
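// Illustrative sketch of how the result is consumed (mirrors promptItemize() below):
// the params object is handed to one of the two itemization templates.
//
//   const params = await itemizedParams(itemizedPrompts, thisPromptSet, incomingMesId);
//   const html = await renderTemplateAsync(params.this_main_api === 'openai' ? 'itemizationChat' : 'itemizationText', params);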
export function findItemizedPromptSet(itemizedPrompts, incomingMesId) {
    let thisPromptSet = undefined;

    for (let i = 0; i < itemizedPrompts.length; i++) {
        console.log(`looking for ${incomingMesId} vs ${itemizedPrompts[i].mesId}`);
        if (itemizedPrompts[i].mesId === incomingMesId) {
            console.log(`found matching mesID ${i}`);
            thisPromptSet = i;
            PromptArrayItemForRawPromptDisplay = i;
            console.log(`will display raw prompt of array item ${PromptArrayItemForRawPromptDisplay}, which is mesID ${incomingMesId}`);
            console.log(itemizedPrompts[thisPromptSet]);
            break;
        } else if (itemizedPrompts[i].rawPrompt) {
            priorPromptArrayItemForRawPromptDisplay = i;
        }
    }
    return thisPromptSet;
}
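// Illustrative usage sketch: the return value is an index into itemizedPrompts (or undefined),
// so callers compare against undefined rather than truthiness, since 0 is a valid index.
//
//   const thisPromptSet = findItemizedPromptSet(itemizedPrompts, Number(requestedMesId));
//   if (thisPromptSet === undefined) { /* nothing recorded for that message */ }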
export async function promptItemize(itemizedPrompts, requestedMesId) {
    console.log('PROMPT ITEMIZE ENTERED');
    const incomingMesId = Number(requestedMesId);
    console.debug(`looking for MesId ${incomingMesId}`);
    const thisPromptSet = findItemizedPromptSet(itemizedPrompts, incomingMesId);

    if (thisPromptSet === undefined) {
        console.log(`couldn't find the right mesId. looked for ${incomingMesId}`);
        console.log(itemizedPrompts);
        return null;
    }

    const params = await itemizedParams(itemizedPrompts, thisPromptSet, incomingMesId);
    const flatten = (rawPrompt) => Array.isArray(rawPrompt) ? rawPrompt.map(x => x.content).join('\n') : rawPrompt;

    const template = params.this_main_api === 'openai'
        ? await renderTemplateAsync('itemizationChat', params)
        : await renderTemplateAsync('itemizationText', params);

    const popup = new Popup(template, POPUP_TYPE.TEXT);

    /** @type {HTMLElement} */
    const diffPrevPrompt = popup.dlg.querySelector('#diffPrevPrompt');
    if (priorPromptArrayItemForRawPromptDisplay) {
        diffPrevPrompt.style.display = '';
        diffPrevPrompt.addEventListener('click', function () {
            const dmp = new DiffMatchPatch();
            const text1 = flatten(itemizedPrompts[priorPromptArrayItemForRawPromptDisplay].rawPrompt);
            const text2 = flatten(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);

            dmp.Diff_Timeout = 2.0;

            const d = dmp.diff_main(text1, text2);
            let ds = dmp.diff_prettyHtml(d);
            // make it readable
            ds = ds.replaceAll('background:#e6ffe6;', 'background:#b9f3b9; color:black;');
            ds = ds.replaceAll('background:#ffe6e6;', 'background:#f5b4b4; color:black;');
            ds = ds.replaceAll('&para;', '');
            const container = document.createElement('div');
            container.innerHTML = DOMPurify.sanitize(ds);
            const rawPromptWrapper = document.getElementById('rawPromptWrapper');
            rawPromptWrapper.replaceChildren(container);
            $('#rawPromptPopup').slideToggle();
        });
    } else {
        diffPrevPrompt.style.display = 'none';
    }
    popup.dlg.querySelector('#copyPromptToClipboard').addEventListener('pointerup', async function () {
        let rawPrompt = itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt;
        let rawPromptValues = rawPrompt;

        if (Array.isArray(rawPrompt)) {
            rawPromptValues = rawPrompt.map(x => x.content).join('\n');
        }

        await copyText(rawPromptValues);
        toastr.info(t`Copied!`);
    });

    popup.dlg.querySelector('#showRawPrompt').addEventListener('click', async function () {
        //console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);
        console.log(PromptArrayItemForRawPromptDisplay);
        console.log(itemizedPrompts);
        console.log(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);

        const rawPrompt = flatten(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt);

        // Mobile needs special handholding. The side-view on the popup wouldn't work,
        // so we just show an additional popup for this.
        if (isMobile()) {
            const content = document.createElement('div');
            content.classList.add('tokenItemizingMaintext');
            content.innerText = rawPrompt;
            const popup = new Popup(content, POPUP_TYPE.TEXT, null, { allowVerticalScrolling: true, leftAlign: true });
            await popup.show();
            return;
        }

        //let DisplayStringifiedPrompt = JSON.stringify(itemizedPrompts[PromptArrayItemForRawPromptDisplay].rawPrompt).replace(/\n+/g, '<br>');
        const rawPromptWrapper = document.getElementById('rawPromptWrapper');
        rawPromptWrapper.innerText = rawPrompt;
        $('#rawPromptPopup').slideToggle();
    });

    await popup.show();
}
export function initItemizedPrompts() {
    registerDebugFunction('clearPrompts', 'Delete itemized prompts', 'Deletes all itemized prompts from the local storage.', async () => {
        await clearItemizedPrompts();
        toastr.info('Itemized prompts deleted.');
        if (getCurrentChatId()) {
            await reloadCurrentChat();
        }
    });

    $(document).on('pointerup', '.mes_prompt', async function () {
        let mesIdForItemization = $(this).closest('.mes').attr('mesId');
        console.log(`looking for mesID: ${mesIdForItemization}`);
        if (itemizedPrompts.length !== undefined && itemizedPrompts.length !== 0) {
            await promptItemize(itemizedPrompts, mesIdForItemization);
        }
    });

    eventSource.on(event_types.CHAT_DELETED, async (name) => {
        await deleteItemizedPrompts(name);
    });
    eventSource.on(event_types.GROUP_CHAT_DELETED, async (name) => {
        await deleteItemizedPrompts(name);
    });
}