🎉 Initialize project
250  web-app/src/additional-headers.js  Normal file
@@ -0,0 +1,250 @@
import { TEXTGEN_TYPES, OPENROUTER_HEADERS, FEATHERLESS_HEADERS } from './constants.js';
import { SECRET_KEYS, readSecret } from './endpoints/secrets.js';
import { getConfigValue } from './util.js';

/**
 * Gets the headers for the Mancer API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getMancerHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.MANCER);

    return apiKey ? ({
        'X-API-KEY': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the TogetherAI API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getTogetherAIHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.TOGETHERAI);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the InfermaticAI API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getInfermaticAIHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.INFERMATICAI);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the DreamGen API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getDreamGenHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.DREAMGEN);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the OpenRouter API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getOpenRouterHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.OPENROUTER);
    const baseHeaders = { ...OPENROUTER_HEADERS };

    return apiKey ? Object.assign(baseHeaders, { 'Authorization': `Bearer ${apiKey}` }) : baseHeaders;
}

/**
 * Gets the headers for the vLLM API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getVllmHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.VLLM);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the Aphrodite API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getAphroditeHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.APHRODITE);

    return apiKey ? ({
        'X-API-KEY': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the Tabby API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getTabbyHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.TABBY);

    return apiKey ? ({
        'x-api-key': apiKey,
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the LlamaCPP API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getLlamaCppHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.LLAMACPP);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the Ooba API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getOobaHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.OOBA);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the KoboldCpp API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getKoboldCppHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.KOBOLDCPP);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the Featherless API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getFeatherlessHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.FEATHERLESS);
    const baseHeaders = { ...FEATHERLESS_HEADERS };

    return apiKey ? Object.assign(baseHeaders, { 'Authorization': `Bearer ${apiKey}` }) : baseHeaders;
}

/**
 * Gets the headers for the HuggingFace API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getHuggingFaceHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.HUGGINGFACE);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets the headers for the Generic text completion API.
 * @param {import('./users.js').UserDirectoryList} directories User directories
 * @returns {object} Headers for the request
 */
function getGenericHeaders(directories) {
    const apiKey = readSecret(directories, SECRET_KEYS.GENERIC);

    return apiKey ? ({
        'Authorization': `Bearer ${apiKey}`,
    }) : {};
}

/**
 * Gets user-defined override headers for a host from the `requestOverrides` config value.
 * @param {string} urlHost Host of the target URL
 * @returns {object} Headers to apply, or an empty object if no override matches
 */
export function getOverrideHeaders(urlHost) {
    const requestOverrides = getConfigValue('requestOverrides', []);
    const overrideHeaders = requestOverrides?.find((e) => e.hosts?.includes(urlHost))?.headers;
    if (overrideHeaders && urlHost) {
        return overrideHeaders;
    } else {
        return {};
    }
}
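
Usage note (not part of this commit): `getOverrideHeaders` matches entries of the `requestOverrides` config value against the target host. A minimal sketch of the expected shape, derived from the lookup above; the host and header names are illustrative:

// Illustrative shape of the `requestOverrides` config value:
const requestOverrides = [
    {
        hosts: ['api.example.com'],
        headers: { 'X-Custom-Header': 'value' },
    },
];
// With this config, getOverrideHeaders('api.example.com')
// would return { 'X-Custom-Header': 'value' }.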

/**
 * Sets additional headers for the request.
 * @param {import('express').Request} request Original Express request
 * @param {object} args New request arguments
 * @param {string|null} server API server for new request
 */
export function setAdditionalHeaders(request, args, server) {
    setAdditionalHeadersByType(args.headers, request.body.api_type, server, request.user.directories);
}

/**
 * Sets additional headers for the request by API type.
 * @param {object} requestHeaders Request headers
 * @param {string} type API type
 * @param {string|null} server API server for new request
 * @param {import('./users.js').UserDirectoryList} directories User directories
 */
export function setAdditionalHeadersByType(requestHeaders, type, server, directories) {
    const headerGetters = {
        [TEXTGEN_TYPES.MANCER]: getMancerHeaders,
        [TEXTGEN_TYPES.VLLM]: getVllmHeaders,
        [TEXTGEN_TYPES.APHRODITE]: getAphroditeHeaders,
        [TEXTGEN_TYPES.TABBY]: getTabbyHeaders,
        [TEXTGEN_TYPES.TOGETHERAI]: getTogetherAIHeaders,
        [TEXTGEN_TYPES.OOBA]: getOobaHeaders,
        [TEXTGEN_TYPES.INFERMATICAI]: getInfermaticAIHeaders,
        [TEXTGEN_TYPES.DREAMGEN]: getDreamGenHeaders,
        [TEXTGEN_TYPES.OPENROUTER]: getOpenRouterHeaders,
        [TEXTGEN_TYPES.KOBOLDCPP]: getKoboldCppHeaders,
        [TEXTGEN_TYPES.LLAMACPP]: getLlamaCppHeaders,
        [TEXTGEN_TYPES.FEATHERLESS]: getFeatherlessHeaders,
        [TEXTGEN_TYPES.HUGGINGFACE]: getHuggingFaceHeaders,
        [TEXTGEN_TYPES.GENERIC]: getGenericHeaders,
    };

    const getHeaders = headerGetters[type];
    const headers = getHeaders ? getHeaders(directories) : {};

    if (typeof server === 'string' && server.length > 0) {
        try {
            const url = new URL(server);
            const overrideHeaders = getOverrideHeaders(url.host);

            if (overrideHeaders && Object.keys(overrideHeaders).length > 0) {
                Object.assign(headers, overrideHeaders);
            }
        } catch {
            // Do nothing
        }
    }

    Object.assign(requestHeaders, headers);
}
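
Usage note (not part of this commit): a minimal sketch of how a caller might consume these helpers when forwarding a request to a backend. The function name and request-body fields are assumptions for illustration, not part of this file.

// Hypothetical caller — the body fields and helper name are illustrative only.
import { setAdditionalHeadersByType } from './additional-headers.js';

function buildForwardHeaders(request) {
    const headers = { 'Content-Type': 'application/json' };
    // Merges the backend's API-key headers plus any host-specific
    // overrides defined in the `requestOverrides` config value.
    setAdditionalHeadersByType(headers, request.body.api_type, request.body.api_server, request.user.directories);
    return headers;
}
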
449  web-app/src/byaf.js  Normal file
@@ -0,0 +1,449 @@
import { promises as fsPromises } from 'node:fs';
import path from 'node:path';
import urlJoin from 'url-join';
import { DEFAULT_AVATAR_PATH } from './constants.js';
import { extractFileFromZipBuffer } from './util.js';

/**
 * A parser for BYAF (Backyard Archive Format) files.
 */
export class ByafParser {
    /**
     * BYAF ZIP buffer.
     * @type {ArrayBufferLike}
     */
    #data;

    /**
     * Creates an instance of ByafParser.
     * @param {ArrayBufferLike} data BYAF ZIP buffer
     */
    constructor(data) {
        this.#data = data;
    }

    /**
     * Replaces known macros in a string.
     * @param {string} [str] String to process
     * @returns {string} String with macros replaced
     * @private
     */
    static replaceMacros(str) {
        return String(str || '')
            .replace(/#{user}:/gi, '{{user}}:')
            .replace(/#{character}:/gi, '{{char}}:')
            .replace(/{character}(?!})/gi, '{{char}}')
            .replace(/{user}(?!})/gi, '{{user}}');
    }
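
    // For reference (not part of this commit): the macro conversion in action.
    // The method is private; the call and input text are illustrative only.
    //
    //   ByafParser.replaceMacros('#{user}: hello, {character}!');
    //   // -> '{{user}}: hello, {{char}}!'
    //
    // Already-converted '{{user}}'/'{{char}}' tokens are left untouched
    // because the (?!}) lookaheads reject braces that are already doubled.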

    /**
     * Formats example messages for a character.
     * @param {ByafExampleMessage[]} [examples] Array of example objects
     * @returns {string} Formatted example messages
     * @private
     */
    static formatExampleMessages(examples) {
        if (!Array.isArray(examples)) {
            return '';
        }

        let formattedExamples = '';

        examples.forEach((example) => {
            if (!example?.text) {
                return;
            }
            formattedExamples += `<START>\n${ByafParser.replaceMacros(example.text)}\n`;
        });

        return formattedExamples.trimEnd();
    }

    /**
     * Formats alternate greetings for a character.
     * @param {Partial<ByafScenario>[]} [scenarios] Array of scenario objects
     * @returns {string[]} Formatted alternate greetings
     * @private
     */
    formatAlternateGreetings(scenarios) {
        if (!Array.isArray(scenarios)) {
            return [];
        }

        // Skip one because it goes into 'first_mes'
        if (scenarios.length <= 1) {
            return [];
        }
        const greetings = new Set();
        const firstScenarioFirstMessage = scenarios?.[0]?.firstMessages?.[0]?.text;
        for (const scenario of scenarios.slice(1).filter(s => Array.isArray(s.firstMessages) && s.firstMessages.length > 0)) {
            // As per the BYAF spec, the "firstMessages" array MUST contain AT MOST one message,
            // so only the first entry is considered if it exists.
            const firstMessage = scenario?.firstMessages?.[0];
            if (firstMessage?.text && firstMessage.text !== firstScenarioFirstMessage) {
                greetings.add(ByafParser.replaceMacros(firstMessage.text));
            }
        }
        return Array.from(greetings);
    }

    /**
     * Converts character book items to a structured format.
     * @param {ByafLoreItem[]} items Array of key-value pairs
     * @returns {CharacterBook|undefined} Converted character book or undefined if invalid
     * @private
     */
    convertCharacterBook(items) {
        if (!Array.isArray(items) || items.length === 0) {
            return undefined;
        }

        /** @type {CharacterBook} */
        const book = {
            entries: [],
            extensions: {},
        };

        items.forEach((item, index) => {
            if (!item) {
                return;
            }
            book.entries.push({
                keys: ByafParser.replaceMacros(item?.key).split(',').map(key => key.trim()).filter(Boolean),
                content: ByafParser.replaceMacros(item?.value),
                extensions: {},
                enabled: true,
                insertion_order: index,
            });
        });

        return book;
    }

    /**
     * Extracts a character object from the BYAF buffer.
     * @param {ByafManifest} manifest BYAF manifest
     * @returns {Promise<{character:ByafCharacter,characterPath:string}>} Character object
     * @private
     */
    async getCharacterFromManifest(manifest) {
        const charactersArray = manifest?.characters;

        if (!Array.isArray(charactersArray)) {
            throw new Error('Invalid BYAF file: missing characters array');
        }

        if (charactersArray.length === 0) {
            throw new Error('Invalid BYAF file: characters array is empty');
        }

        if (charactersArray.length > 1) {
            console.warn('Warning: BYAF manifest contains more than one character, only the first one will be imported');
        }

        const characterPath = charactersArray[0];
        if (!characterPath) {
            throw new Error('Invalid BYAF file: missing character path');
        }

        const characterBuffer = await extractFileFromZipBuffer(this.#data, characterPath);
        if (!characterBuffer) {
            throw new Error('Invalid BYAF file: failed to extract character JSON');
        }

        try {
            const character = JSON.parse(characterBuffer.toString());
            return { character, characterPath };
        } catch (error) {
            console.error('Failed to parse character JSON from BYAF:', error);
            throw new Error('Invalid BYAF file: character is not valid JSON');
        }
    }

    /**
     * Extracts all scenario objects from the BYAF buffer.
     * @param {ByafManifest} manifest BYAF manifest
     * @returns {Promise<Partial<ByafScenario>[]>} Scenarios array
     * @private
     */
    async getScenariosFromManifest(manifest) {
        const scenariosArray = manifest?.scenarios;

        if (!Array.isArray(scenariosArray) || scenariosArray.length === 0) {
            console.warn('Warning: BYAF manifest contains no scenarios');
            return [{}];
        }

        const scenarios = [];

        for (const scenarioPath of scenariosArray) {
            const scenarioBuffer = await extractFileFromZipBuffer(this.#data, scenarioPath);
            if (!scenarioBuffer) {
                console.warn('Warning: failed to extract BYAF scenario JSON');
                continue;
            }
            try {
                scenarios.push(JSON.parse(scenarioBuffer.toString()));
            } catch (error) {
                console.warn('Warning: BYAF scenario is not valid JSON', error);
            }
        }

        if (scenarios.length === 0) {
            console.warn('Warning: BYAF manifest contains no valid scenarios');
            return [{}];
        }

        return scenarios;
    }

    /**
     * Extracts all character icon images from the BYAF buffer.
     * @param {ByafCharacter} character Character object
     * @param {string} characterPath Path to the character in the BYAF manifest
     * @return {Promise<{filename: string, image: Buffer, label: string}[]>} Character image buffers
     * @private
     */
    async getCharacterImages(character, characterPath) {
        const defaultAvatarBuffer = await fsPromises.readFile(DEFAULT_AVATAR_PATH);
        const characterImages = character?.images;

        if (!Array.isArray(characterImages) || characterImages.length === 0) {
            console.warn('Warning: BYAF character has no images');
            return [{ filename: '', image: defaultAvatarBuffer, label: '' }];
        }

        const imageBuffers = [];
        for (const image of characterImages) {
            const imagePath = image?.path;
            if (!imagePath) {
                console.warn('Warning: BYAF character image path is empty');
                continue;
            }

            const fullImagePath = urlJoin(path.dirname(characterPath), imagePath);
            const imageBuffer = await extractFileFromZipBuffer(this.#data, fullImagePath);
            if (!imageBuffer) {
                console.warn('Warning: failed to extract BYAF character image');
                continue;
            }

            imageBuffers.push({ filename: path.basename(imagePath), image: imageBuffer, label: image?.label || '' });
        }

        if (imageBuffers.length === 0) {
            console.warn('Warning: BYAF character has no valid images');
            return [{ filename: '', image: defaultAvatarBuffer, label: '' }];
        }

        return imageBuffers;
    }

    /**
     * Formats BYAF data as a character card.
     * @param {ByafManifest} manifest BYAF manifest
     * @param {ByafCharacter} character Character object
     * @param {Partial<ByafScenario>[]} scenarios Scenarios array
     * @return {TavernCardV2} Character card object
     * @private
     */
    getCharacterCard(manifest, character, scenarios) {
        return {
            spec: 'chara_card_v2',
            spec_version: '2.0',
            data: {
                name: character?.name || character?.displayName || '',
                description: ByafParser.replaceMacros(character?.persona),
                personality: '',
                scenario: ByafParser.replaceMacros(scenarios[0]?.narrative),
                first_mes: ByafParser.replaceMacros(scenarios[0]?.firstMessages?.[0]?.text),
                mes_example: ByafParser.formatExampleMessages(scenarios[0]?.exampleMessages),
                creator_notes: manifest?.author?.backyardURL || '', // A good place to preserve the author link from the BYAF manifest.
                system_prompt: ByafParser.replaceMacros(scenarios[0]?.formattingInstructions),
                post_history_instructions: '',
                alternate_greetings: this.formatAlternateGreetings(scenarios),
                character_book: this.convertCharacterBook(character?.loreItems),
                tags: character?.isNSFW ? ['nsfw'] : [], // The BYAF spec has no tags, so this preserves the isNSFW flag.
                creator: manifest?.author?.name || '',
                character_version: '',
                // Preserve the display name unmodified via extensions; 'display_name' is not currently used by SillyTavern.
                extensions: { ...(character?.displayName && { 'display_name': character?.displayName }) },
            },
            // @ts-ignore Non-standard spec extension
            create_date: new Date().toISOString(),
        };
    }

    /**
     * Gets chat backgrounds from BYAF data mapped to their respective scenarios.
     * @param {ByafCharacter} character Character object
     * @param {Partial<ByafScenario>[]} scenarios Scenarios array
     * @returns {Promise<Array<ByafChatBackground>>} Chat backgrounds
     * @private
     */
    async getChatBackgrounds(character, scenarios) {
        const backgrounds = [];
        let i = 1;
        for (const scenario of scenarios) {
            const bgImagePath = scenario?.backgroundImage;
            if (bgImagePath) {
                const data = await extractFileFromZipBuffer(this.#data, bgImagePath);
                if (data) {
                    const existingIndex = backgrounds.findIndex(bg => bg.data.compare(data) === 0);
                    if (existingIndex !== -1) {
                        backgrounds[existingIndex].paths.push(bgImagePath);
                        continue; // Skip adding a new background since it already exists
                    }
                    backgrounds.push({
                        // Fall back to a generic name when the character has no name.
                        name: character?.name ? `${character.name} bg ${i++}` : `bg ${i++}`,
                        data: data,
                        paths: [bgImagePath],
                    });
                }
            }
        }
        return backgrounds;
    }

    /**
     * Gets the manifest from the BYAF data.
     * @returns {Promise<ByafManifest>} Parsed manifest
     * @private
     */
    async getManifest() {
        const manifestBuffer = await extractFileFromZipBuffer(this.#data, 'manifest.json');
        if (!manifestBuffer) {
            throw new Error('Failed to extract manifest.json from BYAF file');
        }

        const manifest = JSON.parse(manifestBuffer.toString());
        if (!manifest || typeof manifest !== 'object') {
            throw new Error('Invalid BYAF manifest');
        }

        return manifest;
    }

    /**
     * Imports a chat from BYAF format.
     * @param {Partial<ByafScenario>} scenario Scenario object
     * @param {string} userName User name
     * @param {string} characterName Character name
     * @param {Array<ByafChatBackground>} chatBackgrounds Chat backgrounds
     * @returns {string} Chat data in JSONL format
     */
    static getChatFromScenario(scenario, userName, characterName, chatBackgrounds) {
        const chatStartDate = scenario?.messages?.find(m => 'createdAt' in m)?.createdAt ?? new Date().toISOString();
        const chatBackground = chatBackgrounds.find(bg => bg.paths.includes(scenario?.backgroundImage || ''))?.name || '';
        /** @type {object[]} */
        const chat = [{
            user_name: 'unused',
            character_name: 'unused',
            chat_metadata: {
                scenario: scenario?.narrative ?? '',
                mes_example: ByafParser.formatExampleMessages(scenario?.exampleMessages),
                system_prompt: ByafParser.replaceMacros(scenario?.formattingInstructions),
                mes_examples_optional: scenario?.canDeleteExampleMessages ?? false,
                byaf_model_settings: {
                    model: scenario?.model ?? '',
                    temperature: scenario?.temperature ?? 1.2,
                    top_k: scenario?.topK ?? 40,
                    top_p: scenario?.topP ?? 0.9,
                    min_p: scenario?.minP ?? 0.1,
                    min_p_enabled: scenario?.minPEnabled ?? true,
                    repeat_penalty: scenario?.repeatPenalty ?? 1.05,
                    repeat_penalty_tokens: scenario?.repeatLastN ?? 256,
                    by_prompt_template: scenario?.promptTemplate ?? 'general',
                    grammar: scenario?.grammar ?? null,
                },
                chat_backgrounds: chatBackground ? [chatBackground] : [],
                custom_background: chatBackground ? `url("${encodeURI(chatBackground)}")` : '',
            },
        }];

        // Add the first message if it exists.
        if (scenario?.firstMessages?.length && scenario?.firstMessages?.[0]?.text) {
            chat.push({
                name: characterName,
                is_user: false,
                send_date: chatStartDate,
                mes: scenario?.firstMessages?.[0]?.text || '',
            });
        }

        // Reducer that keeps the output with the newest activeTimestamp.
        const sortByTimestamp = (newest, curr) => {
            const aTime = new Date(newest.activeTimestamp);
            const bTime = new Date(curr.activeTimestamp);
            return aTime >= bTime ? newest : curr;
        };

        const getNewestAiMessage = (message) => {
            return message.outputs.reduce(sortByTimestamp);
        };
        const getSwipesForAiMessage = (message) => {
            return message.outputs.map(output => output.text);
        };

        const userMessages = scenario?.messages?.filter(msg => msg.type === 'human');
        const characterMessages = scenario?.messages?.filter(msg => msg.type === 'ai');
        /**
         * Reorders messages by interleaving user and character messages so that they are in the correct
         * chronological order. This is only needed to import old Backyard AI chats whose message order was
         * scrambled by an earlier import version. The Backyard AI Windows frontend never supported creating
         * chats ordered like that in the first place, so for most users this is the desired behavior.
         */
        // Only reorder when there are equal numbers of user and character messages;
        // otherwise import in the existing order, which is probably already correct.
        if (userMessages && characterMessages && userMessages.length === characterMessages.length) {
            for (let i = 0; i < userMessages.length; i++) {
                chat.push({
                    name: userName,
                    is_user: true,
                    send_date: Number(userMessages[i]?.createdAt),
                    mes: userMessages[i]?.text,
                });
                const aiMessage = getNewestAiMessage(characterMessages[i]);
                const aiSwipes = getSwipesForAiMessage(characterMessages[i]);
                chat.push({
                    name: characterName,
                    is_user: false,
                    send_date: Number(aiMessage.createdAt),
                    mes: aiMessage.text,
                    swipes: aiSwipes,
                    swipe_id: aiSwipes.findIndex(s => s === aiMessage.text),
                });
            }
        } else if (scenario?.messages) {
            for (const message of scenario.messages) {
                const isUser = message.type === 'human';
                const aiMessage = !isUser ? getNewestAiMessage(message) : null;
                const chatMessage = {
                    name: isUser ? userName : characterName,
                    is_user: isUser,
                    send_date: Number(isUser ? message.createdAt : aiMessage.createdAt),
                    mes: isUser ? message.text : aiMessage.text,
                };
                if (!isUser) {
                    const aiSwipes = getSwipesForAiMessage(message);
                    chatMessage.swipes = aiSwipes;
                    chatMessage.swipe_id = aiSwipes.findIndex(s => s === aiMessage.text);
                }
                chat.push(chatMessage);
            }
        } else {
            console.warn('Warning: BYAF scenario contained no messages property.');
        }

        return chat.map(obj => JSON.stringify(obj)).join('\n');
    }

    /**
     * Parses the BYAF data.
     * @return {Promise<ByafParseResult>} Parsed character card and image buffer
     */
    async parse() {
        const manifest = await this.getManifest();
        const { character, characterPath } = await this.getCharacterFromManifest(manifest);
        const scenarios = await this.getScenariosFromManifest(manifest);
        const images = await this.getCharacterImages(character, characterPath);
        const card = this.getCharacterCard(manifest, character, scenarios);
        const chatBackgrounds = await this.getChatBackgrounds(character, scenarios);
        return { card, images, scenarios, chatBackgrounds, character };
    }
}

export default ByafParser;
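
Usage note (not part of this commit): a minimal sketch of an import flow. Reading the archive from disk and the file path are assumptions; `parse()` resolves with the card, images, scenarios, and chat backgrounds, and `getChatFromScenario` emits one JSON object per line (JSONL).

// Hypothetical import flow — the file path is illustrative only.
import { promises as fsPromises } from 'node:fs';
import { ByafParser } from './byaf.js';

const buffer = await fsPromises.readFile('/path/to/character.byaf');
const parser = new ByafParser(buffer);
const { card, images, scenarios, chatBackgrounds } = await parser.parse();
// Convert the first scenario to a SillyTavern-style JSONL chat.
const jsonl = ByafParser.getChatFromScenario(scenarios[0], 'User', card.data.name, chatBackgrounds);
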
98  web-app/src/character-card-parser.js  Normal file
@@ -0,0 +1,98 @@
import fs from 'node:fs';
import { Buffer } from 'node:buffer';

import encode from './png/encode.js';
import extract from 'png-chunks-extract';
import PNGtext from 'png-chunk-text';

/**
 * Writes Character metadata to a PNG image buffer.
 * Writes a V2 ('chara') chunk and, when the payload is valid JSON, a V3 ('ccv3') chunk.
 * Existing 'chara' and 'ccv3' chunks are removed first so the old data cannot mismatch the new.
 * @param {Buffer} image PNG image buffer
 * @param {string} data Character data to write
 * @returns {Buffer} PNG image buffer with metadata
 */
export const write = (image, data) => {
    const chunks = extract(new Uint8Array(image));
    const tEXtChunks = chunks.filter(chunk => chunk.name === 'tEXt');

    // Remove existing tEXt chunks
    for (const tEXtChunk of tEXtChunks) {
        const decoded = PNGtext.decode(tEXtChunk.data);
        const keyword = decoded.keyword.toLowerCase();
        if (keyword === 'chara' || keyword === 'ccv3') {
            chunks.splice(chunks.indexOf(tEXtChunk), 1);
        }
    }

    // Add new v2 chunk before the IEND chunk
    const base64EncodedData = Buffer.from(data, 'utf8').toString('base64');
    chunks.splice(-1, 0, PNGtext.encode('chara', base64EncodedData));

    // Try adding a v3 chunk before the IEND chunk
    try {
        // Change the v2 payload to v3 before encoding
        const v3Data = JSON.parse(data);
        v3Data.spec = 'chara_card_v3';
        v3Data.spec_version = '3.0';

        const base64EncodedV3Data = Buffer.from(JSON.stringify(v3Data), 'utf8').toString('base64');
        chunks.splice(-1, 0, PNGtext.encode('ccv3', base64EncodedV3Data));
    } catch (error) {
        // Ignore errors when adding the v3 chunk
    }

    const newBuffer = Buffer.from(encode(chunks));
    return newBuffer;
};

/**
 * Reads Character metadata from a PNG image buffer.
 * Supports both V2 (chara) and V3 (ccv3). V3 (ccv3) takes precedence.
 * @param {Buffer} image PNG image buffer
 * @returns {string} Character data
 */
export const read = (image) => {
    const chunks = extract(new Uint8Array(image));

    const textChunks = chunks.filter((chunk) => chunk.name === 'tEXt').map((chunk) => PNGtext.decode(chunk.data));

    if (textChunks.length === 0) {
        console.error('PNG metadata does not contain any text chunks.');
        throw new Error('No PNG metadata.');
    }

    const ccv3Index = textChunks.findIndex((chunk) => chunk.keyword.toLowerCase() === 'ccv3');

    if (ccv3Index > -1) {
        return Buffer.from(textChunks[ccv3Index].text, 'base64').toString('utf8');
    }

    const charaIndex = textChunks.findIndex((chunk) => chunk.keyword.toLowerCase() === 'chara');

    if (charaIndex > -1) {
        return Buffer.from(textChunks[charaIndex].text, 'base64').toString('utf8');
    }

    console.error('PNG metadata does not contain any character data.');
    throw new Error('No PNG metadata.');
};

/**
 * Parses a card image and returns the character metadata.
 * @param {string} cardUrl Path to the card image
 * @param {string} format File format
 * @returns {Promise<string>} Character data
 */
export const parse = async (cardUrl, format) => {
    const fileFormat = format === undefined ? 'png' : format;

    switch (fileFormat) {
        case 'png': {
            const buffer = fs.readFileSync(cardUrl);
            return read(buffer);
        }
    }

    throw new Error('Unsupported format');
};
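
Usage note (not part of this commit): a round-trip sketch reading a card, touching its data, and writing it back. The file paths are illustrative only.

// Hypothetical round trip — paths are illustrative.
import fs from 'node:fs';
import { parse, write } from './character-card-parser.js';

const json = await parse('/path/to/card.png'); // prefers 'ccv3', falls back to 'chara'
const card = JSON.parse(json);
// ... modify `card` here ...
const png = write(fs.readFileSync('/path/to/card.png'), JSON.stringify(card));
fs.writeFileSync('/path/to/card.out.png', png);
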
399  web-app/src/charx.js  Normal file
@@ -0,0 +1,399 @@
import fs from 'node:fs';
import path from 'node:path';
import _ from 'lodash';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { extractFileFromZipBuffer, extractFilesFromZipBuffer, normalizeZipEntryPath, ensureDirectory } from './util.js';
import { DEFAULT_AVATAR_PATH } from './constants.js';

// 'embeded://' is intentional - RisuAI exports use this misspelling
const CHARX_EMBEDDED_URI_PREFIXES = ['embeded://', 'embedded://', '__asset:'];
const CHARX_IMAGE_EXTENSIONS = new Set(['png', 'jpg', 'jpeg', 'webp', 'gif', 'apng', 'avif', 'bmp', 'jfif']);
const CHARX_SPRITE_TYPES = new Set(['emotion', 'expression']);
const CHARX_BACKGROUND_TYPES = new Set(['background']);

// ZIP local file header signature: PK\x03\x04
const ZIP_SIGNATURE = Buffer.from([0x50, 0x4B, 0x03, 0x04]);

/**
 * Find ZIP data start in buffer (handles SFX/self-extracting archives).
 * @param {Buffer} buffer
 * @returns {Buffer} Buffer starting at ZIP signature, or original if not found
 */
function findZipStart(buffer) {
    const buf = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    const index = buf.indexOf(ZIP_SIGNATURE);
    if (index > 0) {
        return buf.slice(index);
    }
    return buf;
}

/**
 * @typedef {Object} CharXAsset
 * @property {string} type - Asset type (emotion, expression, background, etc.)
 * @property {string} name - Asset name from metadata
 * @property {string} ext - File extension (lowercase, no dot)
 * @property {string} zipPath - Normalized path within the ZIP archive
 * @property {number} order - Original index in assets array
 * @property {string} [storageCategory] - 'sprite' | 'background' | 'misc' (set by mapCharXAssetsForStorage)
 * @property {string} [baseName] - Normalized filename base (set by mapCharXAssetsForStorage)
 */

/**
 * @typedef {Object} CharXParseResult
 * @property {Object} card - Parsed card.json (CCv2 or CCv3 spec)
 * @property {string|Buffer} avatar - Avatar image buffer or DEFAULT_AVATAR_PATH
 * @property {CharXAsset[]} auxiliaryAssets - Assets mapped for storage
 * @property {Map<string, Buffer>} extractedBuffers - Map of zipPath to extracted buffer
 */

export class CharXParser {
    /** @type {Buffer} */
    #data;

    /**
     * @param {ArrayBuffer|Buffer} data
     */
    constructor(data) {
        // Handle SFX (self-extracting) ZIP archives by finding the actual ZIP start
        this.#data = findZipStart(Buffer.isBuffer(data) ? data : Buffer.from(data));
    }

    /**
     * Parse the CharX archive and extract card data and assets.
     * @returns {Promise<CharXParseResult>}
     */
    async parse() {
        console.info('Importing from CharX');
        const cardBuffer = await extractFileFromZipBuffer(this.#data, 'card.json');

        if (!cardBuffer) {
            throw new Error('Failed to extract card.json from CharX file');
        }

        const card = JSON.parse(cardBuffer.toString());

        if (card.spec === undefined) {
            throw new Error('Invalid CharX card file: missing spec field');
        }

        const embeddedAssets = this.collectCharXAssets(card);
        const iconAsset = this.pickCharXIconAsset(embeddedAssets);
        const auxiliaryAssets = this.mapCharXAssetsForStorage(embeddedAssets);

        const archivePaths = new Set();

        if (iconAsset?.zipPath) {
            archivePaths.add(iconAsset.zipPath);
        }
        for (const asset of auxiliaryAssets) {
            if (asset?.zipPath) {
                archivePaths.add(asset.zipPath);
            }
        }

        let extractedBuffers = new Map();
        if (archivePaths.size > 0) {
            extractedBuffers = await extractFilesFromZipBuffer(this.#data, [...archivePaths]);
        }

        /** @type {string|Buffer} */
        let avatar = DEFAULT_AVATAR_PATH;
        if (iconAsset?.zipPath) {
            const iconBuffer = extractedBuffers.get(iconAsset.zipPath);
            if (iconBuffer) {
                avatar = iconBuffer;
            }
        }

        return { card, avatar, auxiliaryAssets, extractedBuffers };
    }

    /**
     * Resolves an embedded asset URI to a path inside the ZIP archive.
     * @param {string} uri Asset URI from card metadata
     * @returns {string|null} Normalized ZIP entry path, or null if the URI is not embedded
     */
    getEmbeddedZipPathFromUri(uri) {
        if (typeof uri !== 'string') {
            return null;
        }

        const trimmed = uri.trim();
        if (!trimmed) {
            return null;
        }

        const lower = trimmed.toLowerCase();
        for (const prefix of CHARX_EMBEDDED_URI_PREFIXES) {
            if (lower.startsWith(prefix)) {
                const rawPath = trimmed.slice(prefix.length);
                return normalizeZipEntryPath(rawPath);
            }
        }

        return null;
    }

    /**
     * Normalize extension string: lowercase, strip leading dot.
     * @param {string} ext
     * @returns {string}
     */
    normalizeExtString(ext) {
        if (typeof ext !== 'string') return '';
        return ext.trim().toLowerCase().replace(/^\./, '');
    }

    /**
     * Strip trailing image extension from asset name if present.
     * Handles cases like "image.png" with ext "png" → "image" (avoids "image.png.png")
     * @param {string} name - Asset name that may contain extension
     * @param {string} expectedExt - The expected extension (lowercase, no dot)
     * @returns {string} Name with trailing extension stripped if it matched
     */
    stripTrailingImageExtension(name, expectedExt) {
        if (!name || !expectedExt) return name;
        const lower = name.toLowerCase();
        // Check if name ends with the expected extension
        if (lower.endsWith(`.${expectedExt}`)) {
            return name.slice(0, -(expectedExt.length + 1));
        }
        // Also check for any known image extension at the end
        for (const ext of CHARX_IMAGE_EXTENSIONS) {
            if (lower.endsWith(`.${ext}`)) {
                return name.slice(0, -(ext.length + 1));
            }
        }
        return name;
    }

    /**
     * Derives an asset extension from metadata, falling back to the ZIP path.
     * @param {string} assetExt Extension from asset metadata
     * @param {string} zipPath Path within the ZIP archive
     * @returns {string} Normalized extension
     */
    deriveCharXAssetExtension(assetExt, zipPath) {
        const metaExt = this.normalizeExtString(assetExt);
        const pathExt = this.normalizeExtString(path.extname(zipPath || ''));
        return metaExt || pathExt;
    }

    /**
     * Collects embedded assets declared in the card's `data.assets` array.
     * @param {Object} card Parsed card.json
     * @returns {CharXAsset[]} Embedded assets
     */
    collectCharXAssets(card) {
        const assets = _.get(card, 'data.assets');
        if (!Array.isArray(assets)) {
            return [];
        }

        return assets.map((asset, index) => {
            if (!asset) {
                return null;
            }

            const zipPath = this.getEmbeddedZipPathFromUri(asset.uri);
            if (!zipPath) {
                return null;
            }

            const ext = this.deriveCharXAssetExtension(asset.ext, zipPath);
            const type = typeof asset.type === 'string' ? asset.type.toLowerCase() : '';
            const name = typeof asset.name === 'string' ? asset.name : '';

            return {
                type,
                name,
                ext,
                zipPath,
                order: index,
            };
        }).filter(Boolean);
    }

    /**
     * Picks the avatar icon: the icon asset named 'main', or the first icon otherwise.
     * @param {CharXAsset[]} assets Embedded assets
     * @returns {CharXAsset|null} Icon asset, if any
     */
    pickCharXIconAsset(assets) {
        const iconAssets = assets.filter(asset => asset.type === 'icon' && CHARX_IMAGE_EXTENSIONS.has(asset.ext) && asset.zipPath);
        if (iconAssets.length === 0) {
            return null;
        }

        const mainIcon = iconAssets.find(asset => asset.name?.toLowerCase() === 'main');
        return mainIcon || iconAssets[0];
    }

    /**
     * Normalize asset name for filesystem storage.
     * @param {string} name - Original asset name
     * @param {string} fallback - Fallback name if normalization fails
     * @param {boolean} useHyphens - Use hyphens instead of underscores (for sprites)
     * @returns {string} Normalized filename base (without extension)
     */
    getCharXAssetBaseName(name, fallback, useHyphens = false) {
        const cleaned = String(name ?? '').trim();
        if (!cleaned) {
            return fallback.toLowerCase();
        }

        const separator = useHyphens ? '-' : '_';
        // Convert to lowercase, collapse non-alphanumeric runs to separator, trim edges
        const base = cleaned
            .toLowerCase()
            .replace(/[^a-z0-9]+/g, separator)
            .replace(new RegExp(`^${separator}|${separator}$`, 'g'), '');

        if (!base) {
            return fallback.toLowerCase();
        }

        const sanitized = sanitize(base);
        return (sanitized || fallback).toLowerCase();
    }

    /**
     * Filters image assets and annotates each with a storage category and base name.
     * @param {CharXAsset[]} assets Embedded assets
     * @returns {CharXAsset[]} Assets mapped for storage
     */
    mapCharXAssetsForStorage(assets) {
        return assets.reduce((acc, asset) => {
            if (!asset?.zipPath) {
                return acc;
            }

            const ext = (asset.ext || '').toLowerCase();
            if (!CHARX_IMAGE_EXTENSIONS.has(ext)) {
                return acc;
            }

            if (asset.type === 'icon' || asset.type === 'user_icon') {
                return acc;
            }

            let storageCategory;
            if (CHARX_SPRITE_TYPES.has(asset.type)) {
                storageCategory = 'sprite';
            } else if (CHARX_BACKGROUND_TYPES.has(asset.type)) {
                storageCategory = 'background';
            } else {
                storageCategory = 'misc';
            }

            // Use hyphens for sprites so ST's expression label extraction works correctly
            // (sprites.js extracts the label via a regex that splits on dash or dot)
            const useHyphens = storageCategory === 'sprite';
            // Strip trailing extension from name if present (e.g., "image.png" with ext "png")
            const nameWithoutExt = this.stripTrailingImageExtension(asset.name, ext);
            acc.push({
                ...asset,
                ext,
                storageCategory,
                baseName: this.getCharXAssetBaseName(nameWithoutExt, `${storageCategory}-${asset.order ?? 0}`, useHyphens),
            });

            return acc;
        }, []);
    }
}

/**
 * Delete existing file with same base name (any extension) before overwriting.
 * Matches ST's sprite upload behavior in sprites.js.
 * @param {string} dirPath - Directory path
 * @param {string} baseName - Base filename without extension
 */
function deleteExistingByBaseName(dirPath, baseName) {
    try {
        const files = fs.readdirSync(dirPath, { withFileTypes: true }).filter(f => f.isFile()).map(f => f.name);
        for (const file of files) {
            if (path.parse(file).name === baseName) {
                fs.unlinkSync(path.join(dirPath, file));
            }
        }
    } catch {
        // Directory doesn't exist yet or other error, that's fine
    }
}

/**
 * Persist extracted CharX assets to appropriate ST directories.
 * Note: Uses sync writes consistent with ST's existing file handling.
 * @param {Array} assets - Mapped assets from CharXParser
 * @param {Map<string, Buffer>} bufferMap - Extracted file buffers
 * @param {Object} directories - User directories object
 * @param {string} characterFolder - Character folder name (sanitized)
 * @returns {{sprites: number, backgrounds: number, misc: number}}
 */
export function persistCharXAssets(assets, bufferMap, directories, characterFolder) {
    /** @type {{sprites: number, backgrounds: number, misc: number}} */
    const summary = { sprites: 0, backgrounds: 0, misc: 0 };
    if (!Array.isArray(assets) || assets.length === 0) {
        return summary;
    }

    let spritesPath = null;
    let miscPath = null;

    const ensureSpritesPath = () => {
        if (spritesPath) {
            return spritesPath;
        }
        const candidate = path.join(directories.characters, characterFolder);
        if (!ensureDirectory(candidate)) {
            return null;
        }
        spritesPath = candidate;
        return spritesPath;
    };

    const ensureMiscPath = () => {
        if (miscPath) {
            return miscPath;
        }
        // Use the image gallery path: user/images/{characterName}/
        const candidate = path.join(directories.userImages, characterFolder);
        if (!ensureDirectory(candidate)) {
            return null;
        }
        miscPath = candidate;
        return miscPath;
    };

    for (const asset of assets) {
        if (!asset?.zipPath) {
            continue;
        }
        const buffer = bufferMap.get(asset.zipPath);
        if (!buffer) {
            console.warn(`CharX: Asset ${asset.zipPath} missing or unsupported, skipping.`);
            continue;
        }

        try {
            if (asset.storageCategory === 'sprite') {
                const targetDir = ensureSpritesPath();
                if (!targetDir) {
                    continue;
                }
                // Delete existing sprite with same base name (any extension) - matches sprites.js behavior
                deleteExistingByBaseName(targetDir, asset.baseName);
                const filePath = path.join(targetDir, `${asset.baseName}.${asset.ext || 'png'}`);
                writeFileAtomicSync(filePath, buffer);
                summary.sprites += 1;
                continue;
            }

            if (asset.storageCategory === 'background') {
                // Store in character-specific backgrounds folder: characters/{charName}/backgrounds/
                const backgroundDir = path.join(directories.characters, characterFolder, 'backgrounds');
                if (!ensureDirectory(backgroundDir)) {
                    continue;
                }
                // Delete existing background with same base name
                deleteExistingByBaseName(backgroundDir, asset.baseName);
                const fileName = `${asset.baseName}.${asset.ext || 'png'}`;
                const filePath = path.join(backgroundDir, fileName);
                writeFileAtomicSync(filePath, buffer);
                summary.backgrounds += 1;
                continue;
            }

            if (asset.storageCategory === 'misc') {
                const miscDir = ensureMiscPath();
                if (!miscDir) {
                    continue;
                }
                // Overwrite existing misc asset with same name
                const filePath = path.join(miscDir, `${asset.baseName}.${asset.ext || 'png'}`);
                writeFileAtomicSync(filePath, buffer);
                summary.misc += 1;
            }
        } catch (error) {
            console.warn(`CharX: Failed to save asset "${asset.name}": ${error.message}`);
        }
    }

    return summary;
}
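
Usage note (not part of this commit): a minimal sketch tying the parser and persistence together. The file path, `directories` object, and folder name are illustrative assumptions.

// Hypothetical usage — path, directories shape, and folder name are illustrative.
import fs from 'node:fs';
import { CharXParser, persistCharXAssets } from './charx.js';

const parser = new CharXParser(fs.readFileSync('/path/to/character.charx'));
const { card, avatar, auxiliaryAssets, extractedBuffers } = await parser.parse();
// `directories` is the per-user directories object provided by the caller.
const summary = persistCharXAssets(auxiliaryAssets, extractedBuffers, directories, 'my-character');
console.info(`Saved ${summary.sprites} sprite(s), ${summary.backgrounds} background(s), ${summary.misc} misc asset(s)`);
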
363  web-app/src/command-line.js  Normal file
@@ -0,0 +1,363 @@
import fs from 'node:fs';
import path from 'node:path';
import yargs from 'yargs/yargs';
import { hideBin } from 'yargs/helpers';
import ipRegex from 'ip-regex';
import envPaths from 'env-paths';
import { color, getConfigValue, stringToBool } from './util.js';
import { initConfig } from './config-init.js';

/**
 * @typedef {object} CommandLineArguments Parsed command line arguments
 * @property {string} configPath Path to the config file
 * @property {string} dataRoot Data root directory
 * @property {number} port Port number
 * @property {boolean} listen Whether SillyTavern listens on all network interfaces
 * @property {string} listenAddressIPv6 IPv6 address to listen to
 * @property {string} listenAddressIPv4 IPv4 address to listen to
 * @property {boolean|string} enableIPv4 Whether to enable the IPv4 protocol ('auto' is also allowed)
 * @property {boolean|string} enableIPv6 Whether to enable the IPv6 protocol ('auto' is also allowed)
 * @property {boolean} dnsPreferIPv6 Whether to prefer IPv6 for DNS
 * @property {boolean} browserLaunchEnabled Whether to automatically launch SillyTavern in the browser
 * @property {string} browserLaunchHostname Browser launch hostname
 * @property {number} browserLaunchPort Browser launch port override (-1 means use the server port)
 * @property {boolean} browserLaunchAvoidLocalhost Whether to avoid using 'localhost' for browser launch in auto mode
 * @property {boolean} enableCorsProxy Whether to enable the CORS proxy
 * @property {boolean} disableCsrf Whether to disable CSRF protection
 * @property {boolean} ssl Whether to enable SSL
 * @property {string} certPath Path to the certificate
 * @property {string} keyPath Path to the private key
 * @property {string} keyPassphrase SSL private key passphrase
 * @property {boolean} whitelistMode Whether to enable whitelist mode
 * @property {boolean} basicAuthMode Whether to enable basic authentication
 * @property {boolean} requestProxyEnabled Whether to enable the outgoing request proxy
 * @property {string} requestProxyUrl Request proxy URL
 * @property {string[]} requestProxyBypass Request proxy bypass list
 * @property {function(): URL} getIPv4ListenUrl Get IPv4 listen URL
 * @property {function(): URL} getIPv6ListenUrl Get IPv6 listen URL
 * @property {function(import('./server-startup.js').ServerStartupResult): Promise<string>} getBrowserLaunchHostname Get browser launch hostname
 * @property {function(string): URL} getBrowserLaunchUrl Get browser launch URL
 */

/**
 * Provides a command line arguments parser.
 */
export class CommandLineParser {
    /**
     * Gets the default configuration values.
     * @param {boolean} isGlobal If the configuration is global or not
     * @returns {CommandLineArguments} Default configuration values
     */
    getDefaultConfig(isGlobal) {
        const appPaths = envPaths('SillyTavern', { suffix: '' });
        const configPath = isGlobal ? path.join(appPaths.data, 'config.yaml') : './config.yaml';
        const dataPath = isGlobal ? path.join(appPaths.data, 'data') : './data';
        return Object.freeze({
            configPath: configPath,
            dataRoot: dataPath,
            port: 8000,
            listen: false,
            listenAddressIPv6: '[::]',
            listenAddressIPv4: '0.0.0.0',
            enableIPv4: true,
            enableIPv6: false,
            dnsPreferIPv6: false,
            browserLaunchEnabled: false,
            browserLaunchHostname: 'auto',
            browserLaunchPort: -1,
            browserLaunchAvoidLocalhost: false,
            enableCorsProxy: false,
            disableCsrf: false,
            ssl: false,
            certPath: 'certs/cert.pem',
            keyPath: 'certs/privkey.pem',
            keyPassphrase: '',
            whitelistMode: true,
            basicAuthMode: false,
            requestProxyEnabled: false,
            requestProxyUrl: '',
            requestProxyBypass: [],
            getIPv4ListenUrl: function () {
                throw new Error('getIPv4ListenUrl is not implemented');
            },
            getIPv6ListenUrl: function () {
                throw new Error('getIPv6ListenUrl is not implemented');
            },
            getBrowserLaunchHostname: async function () {
                throw new Error('getBrowserLaunchHostname is not implemented');
            },
            getBrowserLaunchUrl: function () {
                throw new Error('getBrowserLaunchUrl is not implemented');
            },
        });
    }
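
    // For context (not part of this commit): in global mode the config and data
    // live under the platform-specific per-user data directory resolved by env-paths.
    // A sketch; the printed path is the typical Linux result and varies by platform.
    //
    //   import envPaths from 'env-paths';
    //   const appPaths = envPaths('SillyTavern', { suffix: '' });
    //   console.log(appPaths.data); // e.g. ~/.local/share/SillyTavern on Linux
    //   // Global mode then uses '<data>/config.yaml' and '<data>/data' per getDefaultConfig().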
|
||||
constructor() {
|
||||
this.booleanAutoOptions = [true, false, 'auto'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses command line arguments.
|
||||
* Arguments that are not provided will be filled with config values.
|
||||
* @param {string[]} args Process startup arguments.
|
||||
* @returns {CommandLineArguments} Parsed command line arguments.
|
||||
*/
|
||||
parse(args) {
|
||||
const cliArguments = yargs(hideBin(args))
|
||||
.usage('Usage: <your-start-script> [options]\nOptions that are not provided will be filled with config values.')
|
||||
.option('global', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Use global data and config paths instead of the server directory',
|
||||
})
|
||||
.option('configPath', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Path to the config file (only for standalone mode)',
|
||||
})
|
||||
.option('enableIPv6', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Enables IPv6 protocol',
|
||||
})
|
||||
.option('enableIPv4', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Enables IPv4 protocol',
|
||||
})
|
||||
.option('port', {
|
||||
type: 'number',
|
||||
default: null,
|
||||
describe: 'Sets the server listening port',
|
||||
})
|
||||
.option('dnsPreferIPv6', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Prefers IPv6 for DNS\nYou should probably have the enabled if you\'re on an IPv6 only network',
|
||||
})
|
||||
.option('browserLaunchEnabled', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Automatically launch SillyTavern in the browser',
|
||||
})
|
||||
.option('browserLaunchHostname', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Sets the browser launch hostname, best left on \'auto\'.\nUse values like \'localhost\', \'st.example.com\'',
|
||||
})
|
||||
.option('browserLaunchPort', {
|
||||
type: 'number',
|
||||
default: null,
|
||||
describe: 'Overrides the port for browser launch with open your browser with this port and ignore what port the server is running on. -1 is use server port',
|
||||
})
|
||||
.option('browserLaunchAvoidLocalhost', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Avoids using \'localhost\' for browser launch in auto mode.\nUse if you don\'t have \'localhost\' in your hosts file',
|
||||
})
|
||||
.option('listen', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Whether to listen on all network interfaces',
|
||||
})
|
||||
.option('listenAddressIPv6', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Specific IPv6 address to listen to',
|
||||
})
|
||||
.option('listenAddressIPv4', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Specific IPv4 address to listen to',
|
||||
})
|
||||
.option('corsProxy', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Enables CORS proxy',
|
||||
})
|
||||
.option('disableCsrf', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Disables CSRF protection - NOT RECOMMENDED',
|
||||
})
|
||||
.option('ssl', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Enables SSL',
|
||||
})
|
||||
.option('certPath', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Path to SSL certificate file',
|
||||
})
|
||||
.option('keyPath', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Path to SSL private key file',
|
||||
})
|
||||
.option('keyPassphrase', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Passphrase for the SSL private key',
|
||||
})
|
||||
.option('whitelist', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Enables whitelist mode',
|
||||
})
|
||||
.option('dataRoot', {
|
||||
type: 'string',
|
||||
default: null,
|
||||
describe: 'Root directory for data storage (only for standalone mode)',
|
||||
})
|
||||
.option('basicAuthMode', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Enables basic authentication',
|
||||
})
|
||||
.option('requestProxyEnabled', {
|
||||
type: 'boolean',
|
||||
default: null,
|
||||
describe: 'Enables a use of proxy for outgoing requests',
|
||||
})
    .option('requestProxyUrl', {
        type: 'string',
        default: null,
        describe: 'Request proxy URL (HTTP or SOCKS protocols)',
    })
    .option('requestProxyBypass', {
        type: 'array',
        describe: 'Request proxy bypass list (space separated list of hosts)',
    })
    /* DEPRECATED options */
    .option('autorun', {
        type: 'boolean',
        default: null,
        describe: 'DEPRECATED: Use "browserLaunchEnabled" instead.',
    })
    .option('autorunHostname', {
        type: 'string',
        default: null,
        describe: 'DEPRECATED: Use "browserLaunchHostname" instead.',
    })
    .option('autorunPortOverride', {
        type: 'number',
        default: null,
        describe: 'DEPRECATED: Use "browserLaunchPort" instead.',
    })
    .option('avoidLocalhost', {
        type: 'boolean',
        default: null,
        describe: 'DEPRECATED: Use "browserLaunchAvoidLocalhost" instead.',
    })
    .parseSync();

const isGlobal = globalThis.FORCE_GLOBAL_MODE ?? cliArguments.global ?? false;
const defaultConfig = this.getDefaultConfig(isGlobal);

if (isGlobal && cliArguments.configPath) {
    console.warn(color.yellow('Warning: "--configPath" argument is ignored in global mode'));
}

if (isGlobal && cliArguments.dataRoot) {
    console.warn(color.yellow('Warning: "--dataRoot" argument is ignored in global mode'));
}

const configPath = isGlobal
    ? defaultConfig.configPath
    : (cliArguments.configPath ?? defaultConfig.configPath);
if (isGlobal && !fs.existsSync(path.dirname(configPath))) {
    fs.mkdirSync(path.dirname(configPath), { recursive: true });
}
initConfig(configPath);

const dataRoot = isGlobal
    ? defaultConfig.dataRoot
    : (cliArguments.dataRoot ?? getConfigValue('dataRoot', defaultConfig.dataRoot));
if (isGlobal && !fs.existsSync(dataRoot)) {
    fs.mkdirSync(dataRoot, { recursive: true });
}

/** @type {CommandLineArguments} */
const result = {
    configPath: configPath,
    dataRoot: dataRoot,
    port: cliArguments.port ?? getConfigValue('port', defaultConfig.port, 'number'),
    listen: cliArguments.listen ?? getConfigValue('listen', defaultConfig.listen, 'boolean'),
    listenAddressIPv6: cliArguments.listenAddressIPv6 ?? getConfigValue('listenAddress.ipv6', defaultConfig.listenAddressIPv6),
    listenAddressIPv4: cliArguments.listenAddressIPv4 ?? getConfigValue('listenAddress.ipv4', defaultConfig.listenAddressIPv4),
    enableIPv4: stringToBool(cliArguments.enableIPv4) ?? stringToBool(getConfigValue('protocol.ipv4', defaultConfig.enableIPv4)) ?? defaultConfig.enableIPv4,
    enableIPv6: stringToBool(cliArguments.enableIPv6) ?? stringToBool(getConfigValue('protocol.ipv6', defaultConfig.enableIPv6)) ?? defaultConfig.enableIPv6,
    dnsPreferIPv6: cliArguments.dnsPreferIPv6 ?? getConfigValue('dnsPreferIPv6', defaultConfig.dnsPreferIPv6, 'boolean'),
    browserLaunchEnabled: cliArguments.browserLaunchEnabled ?? cliArguments.autorun ?? getConfigValue('browserLaunch.enabled', defaultConfig.browserLaunchEnabled, 'boolean'),
    browserLaunchHostname: cliArguments.browserLaunchHostname ?? cliArguments.autorunHostname ?? getConfigValue('browserLaunch.hostname', defaultConfig.browserLaunchHostname),
    browserLaunchPort: cliArguments.browserLaunchPort ?? cliArguments.autorunPortOverride ?? getConfigValue('browserLaunch.port', defaultConfig.browserLaunchPort, 'number'),
    browserLaunchAvoidLocalhost: cliArguments.browserLaunchAvoidLocalhost ?? cliArguments.avoidLocalhost ?? getConfigValue('browserLaunch.avoidLocalhost', defaultConfig.browserLaunchAvoidLocalhost, 'boolean'),
    enableCorsProxy: cliArguments.corsProxy ?? getConfigValue('enableCorsProxy', defaultConfig.enableCorsProxy, 'boolean'),
    disableCsrf: cliArguments.disableCsrf ?? getConfigValue('disableCsrfProtection', defaultConfig.disableCsrf, 'boolean'),
    ssl: cliArguments.ssl ?? getConfigValue('ssl.enabled', defaultConfig.ssl, 'boolean'),
    certPath: cliArguments.certPath ?? getConfigValue('ssl.certPath', defaultConfig.certPath),
    keyPath: cliArguments.keyPath ?? getConfigValue('ssl.keyPath', defaultConfig.keyPath),
    keyPassphrase: cliArguments.keyPassphrase ?? getConfigValue('ssl.keyPassphrase', defaultConfig.keyPassphrase),
    whitelistMode: cliArguments.whitelist ?? getConfigValue('whitelistMode', defaultConfig.whitelistMode, 'boolean'),
    basicAuthMode: cliArguments.basicAuthMode ?? getConfigValue('basicAuthMode', defaultConfig.basicAuthMode, 'boolean'),
    requestProxyEnabled: cliArguments.requestProxyEnabled ?? getConfigValue('requestProxy.enabled', defaultConfig.requestProxyEnabled, 'boolean'),
    requestProxyUrl: cliArguments.requestProxyUrl ?? getConfigValue('requestProxy.url', defaultConfig.requestProxyUrl),
    requestProxyBypass: cliArguments.requestProxyBypass ?? getConfigValue('requestProxy.bypass', defaultConfig.requestProxyBypass),
    getIPv4ListenUrl: function () {
        const isValid = ipRegex.v4({ exact: true }).test(this.listenAddressIPv4);
        return new URL(
            (this.ssl ? 'https://' : 'http://') +
            (this.listen ? (isValid ? this.listenAddressIPv4 : '0.0.0.0') : '127.0.0.1') +
            (':' + this.port),
        );
    },
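    // For illustration (values assumed): with ssl=false, listen=true, port=8000 and a valid
    // listenAddressIPv4 of '192.168.1.5', getIPv4ListenUrl() returns new URL('http://192.168.1.5:8000').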
    getIPv6ListenUrl: function () {
        const isValid = ipRegex.v6({ exact: true }).test(this.listenAddressIPv6);
        return new URL(
            (this.ssl ? 'https://' : 'http://') +
            (this.listen ? (isValid ? this.listenAddressIPv6 : '[::]') : '[::1]') +
            (':' + this.port),
        );
    },
    getBrowserLaunchHostname: async function ({ useIPv6, useIPv4 }) {
        if (this.browserLaunchHostname === 'auto') {
            if (useIPv6 && useIPv4) {
                return this.browserLaunchAvoidLocalhost ? '[::1]' : 'localhost';
            }

            if (useIPv6) {
                return '[::1]';
            }

            if (useIPv4) {
                return '127.0.0.1';
            }
        }

        return this.browserLaunchHostname;
    },
    getBrowserLaunchUrl: function (hostname) {
        const browserLaunchPort = (this.browserLaunchPort >= 0) ? this.browserLaunchPort : this.port;
        return new URL(
            (this.ssl ? 'https://' : 'http://') +
            (hostname) +
            (':') +
            (browserLaunchPort),
        );
    },
};

if (!this.booleanAutoOptions.includes(result.enableIPv6)) {
    console.warn(color.red('`protocol: ipv6` option invalid'), '\n use:', this.booleanAutoOptions, '\n setting to:', defaultConfig.enableIPv6);
    result.enableIPv6 = defaultConfig.enableIPv6;
}

if (!this.booleanAutoOptions.includes(result.enableIPv4)) {
    console.warn(color.red('`protocol: ipv4` option invalid'), '\n use:', this.booleanAutoOptions, '\n setting to:', defaultConfig.enableIPv4);
    result.enableIPv4 = defaultConfig.enableIPv4;
}

return result;
}
}
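// Resolution order used throughout `result` above: an explicit CLI argument wins over the
// config.yaml value, which wins over the built-in default, e.g.:
//   port: cliArguments.port ?? getConfigValue('port', defaultConfig.port, 'number')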
253
web-app/src/config-init.js
Normal file
253
web-app/src/config-init.js
Normal file
@@ -0,0 +1,253 @@
import fs from 'node:fs';
import path from 'node:path';
import yaml from 'yaml';
import color from 'chalk';
import _ from 'lodash';
import { serverDirectory } from './server-directory.js';
import { keyToEnv, setConfigFilePath } from './util.js';

const keyMigrationMap = [
    {
        oldKey: 'disableThumbnails',
        newKey: 'thumbnails.enabled',
        migrate: (value) => !value,
    },
    {
        oldKey: 'thumbnailsQuality',
        newKey: 'thumbnails.quality',
        migrate: (value) => value,
    },
    {
        oldKey: 'avatarThumbnailsPng',
        newKey: 'thumbnails.format',
        migrate: (value) => (value ? 'png' : 'jpg'),
    },
    {
        oldKey: 'disableChatBackup',
        newKey: 'backups.chat.enabled',
        migrate: (value) => !value,
    },
    {
        oldKey: 'numberOfBackups',
        newKey: 'backups.common.numberOfBackups',
        migrate: (value) => value,
    },
    {
        oldKey: 'maxTotalChatBackups',
        newKey: 'backups.chat.maxTotalBackups',
        migrate: (value) => value,
    },
    {
        oldKey: 'chatBackupThrottleInterval',
        newKey: 'backups.chat.throttleInterval',
        migrate: (value) => value,
    },
    {
        oldKey: 'enableExtensions',
        newKey: 'extensions.enabled',
        migrate: (value) => value,
    },
    {
        oldKey: 'enableExtensionsAutoUpdate',
        newKey: 'extensions.autoUpdate',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.disableAutoDownload',
        newKey: 'extensions.models.autoDownload',
        migrate: (value) => !value,
    },
    {
        oldKey: 'extras.classificationModel',
        newKey: 'extensions.models.classification',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.captioningModel',
        newKey: 'extensions.models.captioning',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.embeddingModel',
        newKey: 'extensions.models.embedding',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.speechToTextModel',
        newKey: 'extensions.models.speechToText',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.textToSpeechModel',
        newKey: 'extensions.models.textToSpeech',
        migrate: (value) => value,
    },
    {
        oldKey: 'minLogLevel',
        newKey: 'logging.minLogLevel',
        migrate: (value) => value,
    },
    {
        oldKey: 'cardsCacheCapacity',
        newKey: 'performance.memoryCacheCapacity',
        migrate: (value) => `${value}mb`,
    },
    {
        oldKey: 'cookieSecret',
        newKey: 'cookieSecret',
        migrate: () => void 0,
        remove: true,
    },
    {
        oldKey: 'autorun',
        newKey: 'browserLaunch.enabled',
        migrate: (value) => value,
    },
    {
        oldKey: 'autorunHostname',
        newKey: 'browserLaunch.hostname',
        migrate: (value) => value,
    },
    {
        oldKey: 'autorunPortOverride',
        newKey: 'browserLaunch.port',
        migrate: (value) => value,
    },
    {
        oldKey: 'avoidLocalhost',
        newKey: 'browserLaunch.avoidLocalhost',
        migrate: (value) => value,
    },
    {
        oldKey: 'extras.promptExpansionModel',
        newKey: 'extras.promptExpansionModel',
        migrate: () => void 0,
        remove: true,
    },
    {
        oldKey: 'autheliaAuth',
        newKey: 'sso.autheliaAuth',
        migrate: (value) => value,
    },
    {
        oldKey: 'authentikAuth',
        newKey: 'sso.authentikAuth',
        migrate: (value) => value,
    },
];
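// Example of one migration (the first entry above): a legacy config of
// { disableThumbnails: true } is rewritten to { thumbnails: { enabled: false } },
// because migrate inverts the boolean before the value is stored under the new key.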

/**
 * Gets all keys from an object recursively.
 * @param {object} obj Object to get all keys from
 * @param {string} prefix Prefix to prepend to all keys
 * @returns {string[]} Array of all keys in the object
 */
function getAllKeys(obj, prefix = '') {
    if (typeof obj !== 'object' || Array.isArray(obj) || obj === null) {
        return [];
    }

    return _.flatMap(Object.keys(obj), key => {
        const newPrefix = prefix ? `${prefix}.${key}` : key;
        if (typeof obj[key] === 'object' && !Array.isArray(obj[key])) {
            return getAllKeys(obj[key], newPrefix);
        } else {
            return [newPrefix];
        }
    });
}
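// For illustration: getAllKeys({ a: { b: 1 }, c: [2, 3] }) returns ['a.b', 'c'].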

/**
 * Compares the current config.yaml with the default config.yaml and adds any missing values.
 * @param {string} configPath Path to config.yaml
 */
export function addMissingConfigValues(configPath) {
    try {
        const defaultConfig = yaml.parse(fs.readFileSync(path.join(serverDirectory, './default/config.yaml'), 'utf8'));

        if (!fs.existsSync(configPath)) {
            console.warn(color.yellow(`Warning: config.yaml not found at ${configPath}. Creating a new one with default values.`));
            fs.writeFileSync(configPath, yaml.stringify(defaultConfig));
            return;
        }

        let config = yaml.parse(fs.readFileSync(configPath, 'utf8'));

        // Migrate old keys to new keys
        const migratedKeys = [];
        for (const { oldKey, newKey, migrate, remove } of keyMigrationMap) {
            // Migrate environment variables
            const oldEnvKey = keyToEnv(oldKey);
            const newEnvKey = keyToEnv(newKey);
            if (process.env[oldEnvKey] && !process.env[newEnvKey]) {
                const oldValue = process.env[oldEnvKey];
                const newValue = migrate(oldValue);
                process.env[newEnvKey] = newValue;
                delete process.env[oldEnvKey];
                console.warn(color.yellow(`Warning: Using a deprecated environment variable: ${oldEnvKey}. Please use ${newEnvKey} instead.`));
                console.log(`Redirecting ${color.blue(oldEnvKey)}=${oldValue} -> ${color.blue(newEnvKey)}=${newValue}`);
            }

            if (_.has(config, oldKey)) {
                if (remove) {
                    _.unset(config, oldKey);
                    migratedKeys.push({
                        oldKey,
                        newValue: void 0,
                    });
                    continue;
                }

                const oldValue = _.get(config, oldKey);
                const newValue = migrate(oldValue);
                _.set(config, newKey, newValue);
                _.unset(config, oldKey);

                migratedKeys.push({
                    oldKey,
                    newKey,
                    oldValue,
                    newValue,
                });
            }
        }

        // Get all keys from the original config
        const originalKeys = getAllKeys(config);

        // Use lodash's defaultsDeep function to recursively apply default properties
        config = _.defaultsDeep(config, defaultConfig);

        // Get all keys from the updated config
        const updatedKeys = getAllKeys(config);

        // Find the keys that were added
        const addedKeys = _.difference(updatedKeys, originalKeys);

        if (addedKeys.length === 0 && migratedKeys.length === 0) {
            return;
        }

        if (addedKeys.length > 0) {
            console.log('Adding missing config values to config.yaml:', addedKeys);
        }

        if (migratedKeys.length > 0) {
            console.log('Migrating config values in config.yaml:', migratedKeys);
        }

        fs.writeFileSync(configPath, yaml.stringify(config));
    } catch (error) {
        console.error(color.red('FATAL: Could not add missing config values to config.yaml'), error);
    }
}
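// Sketch of the environment-variable redirection above, assuming keyToEnv (defined in
// util.js) maps 'disableThumbnails' to a name like SILLYTAVERN_DISABLETHUMBNAILS — the
// exact naming scheme is an assumption here:
//   SILLYTAVERN_DISABLETHUMBNAILS=true  ->  SILLYTAVERN_THUMBNAILS_ENABLED=false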

/**
 * Performs early initialization tasks before the server starts.
 * @param {string} configPath Path to config.yaml
 */
export function initConfig(configPath) {
    console.log('Using config path:', color.green(configPath));
    setConfigFilePath(configPath);
    addMissingConfigValues(configPath);
}
535
web-app/src/constants.js
Normal file
535
web-app/src/constants.js
Normal file
@@ -0,0 +1,535 @@
export const PUBLIC_DIRECTORIES = {
    images: 'public/img/',
    backups: 'backups/',
    sounds: 'public/sounds',
    extensions: 'public/scripts/extensions',
    globalExtensions: 'public/scripts/extensions/third-party',
};

export const SETTINGS_FILE = 'settings.json';

/**
 * @type {import('./users.js').UserDirectoryList}
 * @readonly
 * @enum {string}
 */
export const USER_DIRECTORY_TEMPLATE = Object.freeze({
    root: '',
    thumbnails: 'thumbnails',
    thumbnailsBg: 'thumbnails/bg',
    thumbnailsAvatar: 'thumbnails/avatar',
    thumbnailsPersona: 'thumbnails/persona',
    worlds: 'worlds',
    user: 'user',
    avatars: 'User Avatars',
    userImages: 'user/images',
    groups: 'groups',
    groupChats: 'group chats',
    chats: 'chats',
    characters: 'characters',
    backgrounds: 'backgrounds',
    novelAI_Settings: 'NovelAI Settings',
    koboldAI_Settings: 'KoboldAI Settings',
    openAI_Settings: 'OpenAI Settings',
    textGen_Settings: 'TextGen Settings',
    themes: 'themes',
    movingUI: 'movingUI',
    extensions: 'extensions',
    instruct: 'instruct',
    context: 'context',
    quickreplies: 'QuickReplies',
    assets: 'assets',
    comfyWorkflows: 'user/workflows',
    files: 'user/files',
    vectors: 'vectors',
    backups: 'backups',
    sysprompt: 'sysprompt',
    reasoning: 'reasoning',
});

/**
 * @type {import('./users.js').User}
 * @readonly
 */
export const DEFAULT_USER = Object.freeze({
    handle: 'default-user',
    name: 'User',
    created: Date.now(),
    password: '',
    admin: true,
    enabled: true,
    salt: '',
});

export const UNSAFE_EXTENSIONS = [
    '.php',
    '.exe',
    '.com',
    '.dll',
    '.pif',
    '.application',
    '.gadget',
    '.msi',
    '.jar',
    '.cmd',
    '.bat',
    '.reg',
    '.sh',
    '.py',
    '.js',
    '.jse',
    '.jsp',
    '.pdf',
    '.html',
    '.htm',
    '.hta',
    '.vb',
    '.vbs',
    '.vbe',
    '.cpl',
    '.msc',
    '.scr',
    '.sql',
    '.iso',
    '.img',
    '.dmg',
    '.ps1',
    '.ps1xml',
    '.ps2',
    '.ps2xml',
    '.psc1',
    '.psc2',
    '.msh',
    '.msh1',
    '.msh2',
    '.mshxml',
    '.msh1xml',
    '.msh2xml',
    '.scf',
    '.lnk',
    '.inf',
    '.reg',
    '.doc',
    '.docm',
    '.docx',
    '.dot',
    '.dotm',
    '.dotx',
    '.xls',
    '.xlsm',
    '.xlsx',
    '.xlt',
    '.xltm',
    '.xltx',
    '.xlam',
    '.ppt',
    '.pptm',
    '.pptx',
    '.pot',
    '.potm',
    '.potx',
    '.ppam',
    '.ppsx',
    '.ppsm',
    '.pps',
    '.ppam',
    '.sldx',
    '.sldm',
    '.ws',
];

export const GEMINI_SAFETY = [
    {
        category: 'HARM_CATEGORY_HARASSMENT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_HATE_SPEECH',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_DANGEROUS_CONTENT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_CIVIC_INTEGRITY',
        threshold: 'OFF',
    },
];

export const VERTEX_SAFETY = [
    {
        category: 'HARM_CATEGORY_IMAGE_HATE',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_IMAGE_DANGEROUS_CONTENT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_IMAGE_HARASSMENT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_IMAGE_SEXUALLY_EXPLICIT',
        threshold: 'OFF',
    },
    {
        category: 'HARM_CATEGORY_JAILBREAK',
        threshold: 'OFF',
    },
];

export const CHAT_COMPLETION_SOURCES = {
    OPENAI: 'openai',
    CLAUDE: 'claude',
    OPENROUTER: 'openrouter',
    AI21: 'ai21',
    MAKERSUITE: 'makersuite',
    VERTEXAI: 'vertexai',
    MISTRALAI: 'mistralai',
    CUSTOM: 'custom',
    COHERE: 'cohere',
    PERPLEXITY: 'perplexity',
    GROQ: 'groq',
    CHUTES: 'chutes',
    ELECTRONHUB: 'electronhub',
    NANOGPT: 'nanogpt',
    DEEPSEEK: 'deepseek',
    AIMLAPI: 'aimlapi',
    XAI: 'xai',
    POLLINATIONS: 'pollinations',
    MOONSHOT: 'moonshot',
    FIREWORKS: 'fireworks',
    COMETAPI: 'cometapi',
    AZURE_OPENAI: 'azure_openai',
    ZAI: 'zai',
    SILICONFLOW: 'siliconflow',
};

/**
 * Path to multer file uploads under the data root.
 */
export const UPLOADS_DIRECTORY = '_uploads';

// TODO: this is copied from the client code; there should be a way to de-duplicate it eventually
export const TEXTGEN_TYPES = {
    OOBA: 'ooba',
    MANCER: 'mancer',
    VLLM: 'vllm',
    APHRODITE: 'aphrodite',
    TABBY: 'tabby',
    KOBOLDCPP: 'koboldcpp',
    TOGETHERAI: 'togetherai',
    LLAMACPP: 'llamacpp',
    OLLAMA: 'ollama',
    INFERMATICAI: 'infermaticai',
    DREAMGEN: 'dreamgen',
    OPENROUTER: 'openrouter',
    FEATHERLESS: 'featherless',
    HUGGINGFACE: 'huggingface',
    GENERIC: 'generic',
};

export const INFERMATICAI_KEYS = [
    'model',
    'prompt',
    'max_tokens',
    'temperature',
    'top_p',
    'top_k',
    'repetition_penalty',
    'stream',
    'stop',
    'presence_penalty',
    'frequency_penalty',
    'min_p',
    'seed',
    'ignore_eos',
    'n',
    'best_of',
    'min_tokens',
    'spaces_between_special_tokens',
    'skip_special_tokens',
    'logprobs',
];

export const FEATHERLESS_KEYS = [
    'model',
    'prompt',
    'best_of',
    'echo',
    'frequency_penalty',
    'logit_bias',
    'logprobs',
    'max_tokens',
    'n',
    'presence_penalty',
    'seed',
    'stop',
    'stream',
    'suffix',
    'temperature',
    'top_p',
    'user',

    'use_beam_search',
    'top_k',
    'min_p',
    'repetition_penalty',
    'length_penalty',
    'early_stopping',
    'stop_token_ids',
    'ignore_eos',
    'min_tokens',
    'skip_special_tokens',
    'spaces_between_special_tokens',
    'truncate_prompt_tokens',

    'include_stop_str_in_output',
    'response_format',
    'guided_json',
    'guided_regex',
    'guided_choice',
    'guided_grammar',
    'guided_decoding_backend',
    'guided_whitespace_pattern',
];

// https://docs.together.ai/reference/completions
export const TOGETHERAI_KEYS = [
    'model',
    'prompt',
    'max_tokens',
    'temperature',
    'top_p',
    'top_k',
    'repetition_penalty',
    'min_p',
    'presence_penalty',
    'frequency_penalty',
    'stream',
    'stop',
];

// https://github.com/ollama/ollama/blob/main/docs/api.md#request-8
export const OLLAMA_KEYS = [
    'num_predict',
    'num_ctx',
    'num_batch',
    'stop',
    'temperature',
    'repeat_penalty',
    'presence_penalty',
    'frequency_penalty',
    'top_k',
    'top_p',
    'tfs_z',
    'typical_p',
    'seed',
    'repeat_last_n',
    'min_p',
];

// https://platform.openai.com/docs/api-reference/completions
export const OPENAI_KEYS = [
    'model',
    'prompt',
    'stream',
    'temperature',
    'top_p',
    'frequency_penalty',
    'presence_penalty',
    'stop',
    'seed',
    'logit_bias',
    'logprobs',
    'max_tokens',
    'n',
    'best_of',
];

export const AVATAR_WIDTH = 512;
export const AVATAR_HEIGHT = 768;
export const DEFAULT_AVATAR_PATH = './public/img/ai4.png';

export const OPENROUTER_HEADERS = {
    'HTTP-Referer': 'https://sillytavern.app',
    'X-Title': 'SillyTavern',
};

export const AIMLAPI_HEADERS = {
    'HTTP-Referer': 'https://sillytavern.app',
    'X-Title': 'SillyTavern',
};

export const FEATHERLESS_HEADERS = {
    'HTTP-Referer': 'https://sillytavern.app',
    'X-Title': 'SillyTavern',
};

export const OPENROUTER_KEYS = [
    'max_tokens',
    'temperature',
    'top_k',
    'top_p',
    'presence_penalty',
    'frequency_penalty',
    'repetition_penalty',
    'min_p',
    'top_a',
    'seed',
    'logit_bias',
    'model',
    'stream',
    'prompt',
    'stop',
    'provider',
    'include_reasoning',
];

// https://github.com/vllm-project/vllm/blob/0f8a91401c89ac0a8018def3756829611b57727f/vllm/entrypoints/openai/protocol.py#L220
export const VLLM_KEYS = [
    'model',
    'prompt',
    'best_of',
    'echo',
    'frequency_penalty',
    'logit_bias',
    'logprobs',
    'max_tokens',
    'n',
    'presence_penalty',
    'seed',
    'stop',
    'stream',
    'suffix',
    'temperature',
    'top_p',
    'user',

    'use_beam_search',
    'top_k',
    'min_p',
    'repetition_penalty',
    'length_penalty',
    'early_stopping',
    'stop_token_ids',
    'ignore_eos',
    'min_tokens',
    'skip_special_tokens',
    'spaces_between_special_tokens',
    'truncate_prompt_tokens',

    'include_stop_str_in_output',
    'response_format',
    'guided_json',
    'guided_regex',
    'guided_choice',
    'guided_grammar',
    'guided_decoding_backend',
    'guided_whitespace_pattern',
];

export const AZURE_OPENAI_KEYS = [
    'messages',
    'temperature',
    'frequency_penalty',
    'presence_penalty',
    'top_p',
    'max_tokens',
    'max_completion_tokens',
    'stream',
    'logit_bias',
    'stop',
    'n',
    'logprobs',
    'seed',
    'tools',
    'tool_choice',
    'reasoning_effort',
];

export const OPENAI_VERBOSITY_MODELS = /^gpt-5/;
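// For illustration: OPENAI_VERBOSITY_MODELS.test('gpt-5-mini') === true, while
// OPENAI_VERBOSITY_MODELS.test('gpt-4o') === false.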

export const OPENAI_REASONING_EFFORT_MODELS = [
    'o1',
    'o3-mini',
    'o3-mini-2025-01-31',
    'o4-mini',
    'o4-mini-2025-04-16',
    'o3',
    'o3-2025-04-16',
    'gpt-5',
    'gpt-5-2025-08-07',
    'gpt-5-mini',
    'gpt-5-mini-2025-08-07',
    'gpt-5-nano',
    'gpt-5-nano-2025-08-07',
    'gpt-5.1',
    'gpt-5.1-2025-11-13',
    'gpt-5.1-chat-latest',
    'gpt-5.2',
    'gpt-5.2-2025-12-11',
    'gpt-5.2-chat-latest',
];

export const OPENAI_REASONING_EFFORT_MAP = {
    min: 'minimal',
};

export const LOG_LEVELS = {
    DEBUG: 0,
    INFO: 1,
    WARN: 2,
    ERROR: 3,
};
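// These numeric levels pair with the logging.minLogLevel setting (see the key migration
// in config-init.js); presumably a minLogLevel of LOG_LEVELS.WARN (2) keeps WARN and
// ERROR output while dropping DEBUG and INFO.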

/**
 * An array of supported media file extensions.
 * This is used to validate file uploads and ensure that only supported media types are processed.
 */
export const MEDIA_EXTENSIONS = [
    'bmp',
    'png',
    'jpg',
    'webp',
    'jpeg',
    'jfif',
    'gif',
    'mp4',
    'avi',
    'mov',
    'wmv',
    'flv',
    'webm',
    '3gp',
    'mkv',
    'mpg',
    'mp3',
    'wav',
    'ogg',
    'flac',
    'aac',
    'm4a',
    'aiff',
];

/**
 * Bitwise flag-style media request types.
 */
export const MEDIA_REQUEST_TYPE = {
    IMAGE: 0b001,
    VIDEO: 0b010,
    AUDIO: 0b100,
};
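// The flags combine with bitwise OR; a sketch (not from this file):
//   const wanted = MEDIA_REQUEST_TYPE.IMAGE | MEDIA_REQUEST_TYPE.VIDEO; // 0b011
//   const wantsVideo = Boolean(wanted & MEDIA_REQUEST_TYPE.VIDEO);      // true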

export const ZAI_ENDPOINT = {
    COMMON: 'common',
    CODING: 'coding',
};
6
web-app/src/electron/Start.bat
Normal file
6
web-app/src/electron/Start.bat
Normal file
@@ -0,0 +1,6 @@
@echo off
pushd %~dp0
call npm install --no-save --no-audit --no-fund --loglevel=error --no-progress --omit=dev
npm run start server.js %*
pause
popd
62
web-app/src/electron/index.js
Normal file
62
web-app/src/electron/index.js
Normal file
@@ -0,0 +1,62 @@
import { app, BrowserWindow } from 'electron';
import path from 'path';
import { fileURLToPath } from 'url';
import yargs from 'yargs';
import { serverEvents, EVENT_NAMES } from '../server-events.js';

const cliArguments = yargs(process.argv)
    .usage('Usage: <your-start-script> [options]')
    .option('width', {
        type: 'number',
        default: 800,
        describe: 'The width of the window',
    })
    .option('height', {
        type: 'number',
        default: 600,
        describe: 'The height of the window',
    })
    .parseSync();
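// Example invocation through the start script further below (flag values are for illustration):
//   ./start.sh --width=1280 --height=800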

/** @type {string} The URL to load in the window. */
let appUrl;

function createSillyTavernWindow() {
    if (!appUrl) {
        console.error('The server has not started yet.');
        return;
    }
    new BrowserWindow({
        height: cliArguments.height,
        width: cliArguments.width,
    }).loadURL(appUrl);
}

function startServer() {
    // Note: the returned promise never settles; the window is created from the
    // SERVER_STARTED event instead.
    return new Promise((_resolve, _reject) => {
        serverEvents.addListener(EVENT_NAMES.SERVER_STARTED, ({ url }) => {
            appUrl = url.toString();
            createSillyTavernWindow();
        });
        const sillyTavernRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../..');
        process.chdir(sillyTavernRoot);

        import('../server-global.js');
    });
}

app.whenReady().then(() => {
    app.on('activate', () => {
        if (BrowserWindow.getAllWindows().length === 0) {
            createSillyTavernWindow();
        }
    });

    startServer();
});

app.on('window-all-closed', () => {
    if (process.platform !== 'darwin') {
        app.quit();
    }
});
802
web-app/src/electron/package-lock.json
generated
Normal file
802
web-app/src/electron/package-lock.json
generated
Normal file
16
web-app/src/electron/package.json
Normal file
@@ -0,0 +1,16 @@
{
  "name": "sillytavern-electron",
  "version": "1.0.0",
  "description": "Electron server for SillyTavern",
  "license": "AGPL-3.0",
  "author": "",
  "type": "module",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "start": "electron ."
  },
  "dependencies": {
    "electron": "^35.0.0"
  }
}
11
web-app/src/electron/start.sh
Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

# Make sure pwd is the directory of the script
cd "$(dirname "$0")"

echo "Assuming nodejs and npm are already installed. If you haven't installed them already, do so now."
echo "Installing Electron Wrapper's Node Modules..."
npm i --no-save --no-audit --no-fund --loglevel=error --no-progress --omit=dev

echo "Starting Electron Wrapper..."
npm run start -- "$@"
66
web-app/src/endpoints/anthropic.js
Normal file
@@ -0,0 +1,66 @@
import fetch from 'node-fetch';
import express from 'express';

import { readSecret, SECRET_KEYS } from './secrets.js';

export const router = express.Router();

router.post('/caption-image', async (request, response) => {
    try {
        const mimeType = request.body.image.split(';')[0].split(':')[1];
        const base64Data = request.body.image.split(',')[1];
        const baseUrl = request.body.reverse_proxy ? request.body.reverse_proxy : 'https://api.anthropic.com/v1';
        const url = `${baseUrl}/messages`;
        const body = {
            model: request.body.model,
            messages: [
                {
                    'role': 'user', 'content': [
                        {
                            'type': 'image',
                            'source': {
                                'type': 'base64',
                                'media_type': mimeType,
                                'data': base64Data,
                            },
                        },
                        { 'type': 'text', 'text': request.body.prompt },
                    ],
                },
            ],
            max_tokens: 4096,
        };

        console.debug('Multimodal captioning request', body);

        const result = await fetch(url, {
            body: JSON.stringify(body),
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'anthropic-version': '2023-06-01',
                'x-api-key': request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.CLAUDE),
            },
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn(`Claude API returned error: ${result.status} ${result.statusText}`, text);
            return response.status(result.status).send({ error: true });
        }

        /** @type {any} */
        const generateResponseJson = await result.json();
        const caption = generateResponseJson.content[0].text;
        console.debug('Claude response:', generateResponseJson);

        if (!caption) {
            return response.status(500).send('No caption found');
        }

        return response.json({ caption });
    } catch (error) {
        console.error(error);
        response.status(500).send('Internal server error');
    }
});
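For reference, a minimal client-side sketch of calling the captioning endpoint above. The mount path (/api/anthropic), host/port, and model name are assumptions not shown in this diff, and whatever session middleware populates request.user is assumed to accept the request.

// Hypothetical usage sketch: caption a local image via the endpoint above.
import fs from 'node:fs';
import fetch from 'node-fetch';

const image = `data:image/png;base64,${fs.readFileSync('cat.png').toString('base64')}`;

const response = await fetch('http://127.0.0.1:8000/api/anthropic/caption-image', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        image,                                  // data URL; the handler splits off the MIME type and base64 payload
        prompt: 'Describe this image briefly.',
        model: 'claude-3-haiku-20240307',       // assumed model name
    }),
});

const { caption } = await response.json();
console.log(caption);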
370
web-app/src/endpoints/assets.js
Normal file
@@ -0,0 +1,370 @@
import path from 'node:path';
import fs from 'node:fs';
import { finished } from 'node:stream/promises';

import mime from 'mime-types';
import express from 'express';
import sanitize from 'sanitize-filename';
import fetch from 'node-fetch';

import { UNSAFE_EXTENSIONS } from '../constants.js';
import { clientRelativePath } from '../util.js';

const VALID_CATEGORIES = ['bgm', 'ambient', 'blip', 'live2d', 'vrm', 'character', 'temp'];

/**
 * Validates the input filename for the asset.
 * @param {string} inputFilename Input filename
 * @returns {{error: boolean, message?: string}} Whether validation failed, and why if so
 */
export function validateAssetFileName(inputFilename) {
    if (!/^[a-zA-Z0-9_\-.]+$/.test(inputFilename)) {
        return {
            error: true,
            message: 'Illegal character in filename; only alphanumeric characters, \'.\', \'_\' and \'-\' are accepted.',
        };
    }

    const inputExtension = path.extname(inputFilename).toLowerCase();
    if (UNSAFE_EXTENSIONS.some(ext => ext === inputExtension)) {
        return {
            error: true,
            message: 'Forbidden file extension.',
        };
    }

    if (inputFilename.startsWith('.')) {
        return {
            error: true,
            message: 'Filename cannot start with \'.\'',
        };
    }

    if (sanitize(inputFilename) !== inputFilename) {
        return {
            error: true,
            message: 'Reserved or long filename.',
        };
    }

    return { error: false };
}

/**
 * Recursive function to get files
 * @param {string} dir - The directory to search for files
 * @param {string[]} files - The array of files to return
 * @returns {string[]} - The array of files
 */
function getFiles(dir, files = []) {
    if (!fs.existsSync(dir)) return files;

    // Get an array of all files and directories in the passed directory using fs.readdirSync
    const fileList = fs.readdirSync(dir, { withFileTypes: true });
    // Create the full path of the file/directory by concatenating the passed directory and file/directory name
    for (const file of fileList) {
        const name = path.join(dir, file.name);
        // Check if the current entry is a directory using the Dirent returned by readdirSync
        if (file.isDirectory()) {
            // If it is a directory, recursively call the getFiles function with the directory path and the files array
            getFiles(name, files);
        } else {
            // If it is a file, push the full path to the files array
            files.push(name);
        }
    }
    return files;
}

/**
 * Ensure that the asset folders exist.
 * @param {import('../users.js').UserDirectoryList} directories - The user's directories
 */
function ensureFoldersExist(directories) {
    const folderPath = path.join(directories.assets);

    for (const category of VALID_CATEGORIES) {
        const assetCategoryPath = path.join(folderPath, category);
        if (fs.existsSync(assetCategoryPath) && !fs.statSync(assetCategoryPath).isDirectory()) {
            fs.unlinkSync(assetCategoryPath);
        }
        if (!fs.existsSync(assetCategoryPath)) {
            fs.mkdirSync(assetCategoryPath, { recursive: true });
        }
    }
}

export const router = express.Router();

/**
 * HTTP POST handler function to retrieve the names of all files in the user's asset folders.
 *
 * @param {Object} request - HTTP Request object.
 * @param {Object} response - HTTP Response object will contain a list of file paths per category.
 *
 * @returns {void}
 */
router.post('/get', async (request, response) => {
    const folderPath = path.join(request.user.directories.assets);
    let output = {};

    try {
        if (fs.existsSync(folderPath) && fs.statSync(folderPath).isDirectory()) {

            ensureFoldersExist(request.user.directories);

            const folders = fs.readdirSync(folderPath, { withFileTypes: true })
                .filter(file => file.isDirectory());

            for (const { name: folder } of folders) {
                if (folder == 'temp')
                    continue;

                // Live2d assets
                if (folder == 'live2d') {
                    output[folder] = [];
                    const live2d_folder = path.normalize(path.join(folderPath, folder));
                    const files = getFiles(live2d_folder);
                    for (let file of files) {
                        if (file.includes('model') && file.endsWith('.json')) {
                            output[folder].push(clientRelativePath(request.user.directories.root, file));
                        }
                    }
                    continue;
                }

                // VRM assets
                if (folder == 'vrm') {
                    output[folder] = { 'model': [], 'animation': [] };
                    // Extract models
                    const vrm_model_folder = path.normalize(path.join(folderPath, 'vrm', 'model'));
                    let files = getFiles(vrm_model_folder);
                    for (let file of files) {
                        if (!file.endsWith('.placeholder')) {
                            output['vrm']['model'].push(clientRelativePath(request.user.directories.root, file));
                        }
                    }

                    // Extract animations
                    const vrm_animation_folder = path.normalize(path.join(folderPath, 'vrm', 'animation'));
                    files = getFiles(vrm_animation_folder);
                    for (let file of files) {
                        if (!file.endsWith('.placeholder')) {
                            output['vrm']['animation'].push(clientRelativePath(request.user.directories.root, file));
                        }
                    }
                    continue;
                }

                // Other assets (bgm/ambient/blip)
                const files = fs.readdirSync(path.join(folderPath, folder))
                    .filter(filename => {
                        return filename != '.placeholder';
                    });
                output[folder] = [];
                for (const file of files) {
                    output[folder].push(`assets/${folder}/${file}`);
                }
            }
        }
    }
    catch (err) {
        console.error(err);
    }
    return response.send(output);
});

/**
 * HTTP POST handler function to download the requested asset.
 *
 * @param {Object} request - HTTP Request object, expects a url, a category and a filename.
 * @param {Object} response - HTTP Response only gives status.
 *
 * @returns {void}
 */
router.post('/download', async (request, response) => {
    const url = request.body.url;
    const inputCategory = request.body.category;

    // Check category
    let category = null;
    for (let i of VALID_CATEGORIES)
        if (i == inputCategory)
            category = i;

    if (category === null) {
        console.error('Bad request: unsupported asset category.');
        return response.sendStatus(400);
    }

    // Validate filename
    ensureFoldersExist(request.user.directories);
    const validation = validateAssetFileName(request.body.filename);
    if (validation.error)
        return response.status(400).send(validation.message);

    const temp_path = path.join(request.user.directories.assets, 'temp', request.body.filename);
    const file_path = path.join(request.user.directories.assets, category, request.body.filename);
    console.info('Request received to download', url, 'to', file_path);

    try {
        // Download to temp
        const res = await fetch(url);
        if (!res.ok || res.body === null) {
            throw new Error(`Unexpected response ${res.statusText}`);
        }
        const destination = path.resolve(temp_path);
        // Delete if previous download failed
        if (fs.existsSync(temp_path)) {
            fs.unlink(temp_path, (err) => {
                if (err) throw err;
            });
        }
        const fileStream = fs.createWriteStream(destination, { flags: 'wx' });
        // @ts-ignore
        await finished(res.body.pipe(fileStream));

        if (category === 'character') {
            const fileContent = fs.readFileSync(temp_path);
            const contentType = mime.lookup(temp_path) || 'application/octet-stream';
            response.setHeader('Content-Type', contentType);
            response.send(fileContent);
            fs.unlinkSync(temp_path);
            return;
        }

        // Move into asset place
        console.info('Download finished, moving file from', temp_path, 'to', file_path);
        fs.copyFileSync(temp_path, file_path);
        fs.unlinkSync(temp_path);
        response.sendStatus(200);
    }
    catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

/**
 * HTTP POST handler function to delete the requested asset.
 *
 * @param {Object} request - HTTP Request object, expects a category and a filename
 * @param {Object} response - HTTP Response only gives status.
 *
 * @returns {void}
 */
router.post('/delete', async (request, response) => {
    const inputCategory = request.body.category;

    // Check category
    let category = null;
    for (let i of VALID_CATEGORIES)
        if (i == inputCategory)
            category = i;

    if (category === null) {
        console.error('Bad request: unsupported asset category.');
        return response.sendStatus(400);
    }

    // Validate filename
    const validation = validateAssetFileName(request.body.filename);
    if (validation.error)
        return response.status(400).send(validation.message);

    const file_path = path.join(request.user.directories.assets, category, request.body.filename);
    console.info('Request received to delete', category, file_path);

    try {
        // Delete the asset file if it exists
        if (fs.existsSync(file_path)) {
            fs.unlink(file_path, (err) => {
                if (err) throw err;
            });
            console.info('Asset deleted.');
        }
        else {
            console.error('Asset not found.');
            return response.sendStatus(400);
        }
        return response.sendStatus(200);
    }
    catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

///////////////////////////////
/**
 * HTTP POST handler function to retrieve a character's background music list.
 *
 * @param {Object} request - HTTP Request object, expects a character name in the query.
 * @param {Object} response - HTTP Response object will contain a list of audio file paths.
 *
 * @returns {void}
 */
router.post('/character', async (request, response) => {
    if (request.query.name === undefined) return response.sendStatus(400);

    // For backwards compatibility, don't reject invalid character names, just sanitize them
    const name = sanitize(request.query.name.toString());
    const inputCategory = request.query.category;

    // Check category
    let category = null;
    for (let i of VALID_CATEGORIES)
        if (i == inputCategory)
            category = i;

    if (category === null) {
        console.error('Bad request: unsupported asset category.');
        return response.sendStatus(400);
    }

    const folderPath = path.join(request.user.directories.characters, name, category);

    let output = [];
    try {
        if (fs.existsSync(folderPath) && fs.statSync(folderPath).isDirectory()) {

            // Live2d assets
            if (category == 'live2d') {
                const folders = fs.readdirSync(folderPath, { withFileTypes: true });
                for (const folderInfo of folders) {
                    if (!folderInfo.isDirectory()) continue;

                    const modelFolder = folderInfo.name;
                    const live2dModelPath = path.join(folderPath, modelFolder);
                    for (let file of fs.readdirSync(live2dModelPath)) {
                        if (file.includes('model') && file.endsWith('.json'))
                            output.push(path.join('characters', name, category, modelFolder, file));
                    }
                }
                return response.send(output);
            }

            // Other assets
            const files = fs.readdirSync(folderPath)
                .filter(filename => {
                    return filename != '.placeholder';
                });

            for (let i of files)
                output.push(`/characters/${name}/${category}/${i}`);
        }
        return response.send(output);
    }
    catch (err) {
        console.error(err);
        return response.sendStatus(500);
    }
});
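A hedged usage sketch for the /download handler above. The mount path (/api/assets) and the sample source URL are assumptions not taken from this diff; the filename must pass validateAssetFileName() and the category must be one of VALID_CATEGORIES.

// Hypothetical usage sketch: ask the server to fetch an ambient sound asset.
import fetch from 'node-fetch';

const response = await fetch('http://127.0.0.1:8000/api/assets/download', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        url: 'https://example.com/sounds/rain.mp3', // assumed source URL
        category: 'ambient',                        // must be one of VALID_CATEGORIES
        filename: 'rain.mp3',                       // must pass validateAssetFileName()
    }),
});

console.log('Download status:', response.status); // 200 on success, 400/500 otherwise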
65
web-app/src/endpoints/avatars.js
Normal file
@@ -0,0 +1,65 @@
import path from 'node:path';
import fs from 'node:fs';

import express from 'express';
import sanitize from 'sanitize-filename';
import { Jimp } from '../jimp.js';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

import { getImages, tryParse } from '../util.js';
import { getFileNameValidationFunction } from '../middleware/validateFileName.js';
import { applyAvatarCropResize } from './characters.js';
import { invalidateThumbnail } from './thumbnails.js';
import cacheBuster from '../middleware/cacheBuster.js';

export const router = express.Router();

router.post('/get', function (request, response) {
    const images = getImages(request.user.directories.avatars);
    response.send(images);
});

router.post('/delete', getFileNameValidationFunction('avatar'), function (request, response) {
    if (!request.body) return response.sendStatus(400);

    if (request.body.avatar !== sanitize(request.body.avatar)) {
        console.error('Malicious avatar name prevented');
        return response.sendStatus(403);
    }

    const fileName = path.join(request.user.directories.avatars, sanitize(request.body.avatar));

    if (fs.existsSync(fileName)) {
        fs.unlinkSync(fileName);
        invalidateThumbnail(request.user.directories, 'persona', sanitize(request.body.avatar));
        return response.send({ result: 'ok' });
    }

    return response.sendStatus(404);
});

router.post('/upload', getFileNameValidationFunction('overwrite_name'), async (request, response) => {
    if (!request.file) return response.sendStatus(400);

    try {
        const pathToUpload = path.join(request.file.destination, request.file.filename);
        const crop = tryParse(request.query.crop);
        const rawImg = await Jimp.read(pathToUpload);
        const image = await applyAvatarCropResize(rawImg, crop);

        // Remove previous thumbnail and bust cache if overwriting
        if (request.body.overwrite_name) {
            invalidateThumbnail(request.user.directories, 'persona', sanitize(request.body.overwrite_name));
            cacheBuster.bust(request, response);
        }

        const filename = sanitize(request.body.overwrite_name || `${Date.now()}.png`);
        const pathToNewFile = path.join(request.user.directories.avatars, filename);
        writeFileAtomicSync(pathToNewFile, image);
        fs.unlinkSync(pathToUpload);
        return response.send({ path: filename });
    } catch (err) {
        console.error('Error uploading user avatar:', err);
        return response.status(400).send('Is not a valid image');
    }
});
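A minimal sketch of uploading an avatar through the handler above. The mount path (/api/avatars), the multipart field name ('avatar'), and the crop object's shape are assumptions; the multer middleware that populates request.file and the format applyAvatarCropResize() expects are defined elsewhere and not shown in this diff.

// Hypothetical usage sketch: upload a persona avatar with an optional crop.
import fetch, { FormData, fileFromSync } from 'node-fetch';

const form = new FormData();
form.append('avatar', fileFromSync('me.png', 'image/png')); // assumed field name

const crop = encodeURIComponent(JSON.stringify({ x: 0, y: 0, width: 400, height: 400 })); // assumed crop shape
const response = await fetch(`http://127.0.0.1:8000/api/avatars/upload?crop=${crop}`, {
    method: 'POST',
    body: form,
});

console.log(await response.json()); // e.g. { path: '1700000000000.png' }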
88
web-app/src/endpoints/azure.js
Normal file
@@ -0,0 +1,88 @@
import fetch from 'node-fetch';
import { Router } from 'express';

import { readSecret, SECRET_KEYS } from './secrets.js';

export const router = Router();

router.post('/list', async (req, res) => {
    try {
        const key = readSecret(req.user.directories, SECRET_KEYS.AZURE_TTS);

        if (!key) {
            console.warn('Azure TTS API Key not set');
            return res.sendStatus(403);
        }

        const region = req.body.region;

        if (!region) {
            console.warn('Azure TTS region not set');
            return res.sendStatus(400);
        }

        const url = `https://${region}.tts.speech.microsoft.com/cognitiveservices/voices/list`;

        const response = await fetch(url, {
            method: 'GET',
            headers: {
                'Ocp-Apim-Subscription-Key': key,
            },
        });

        if (!response.ok) {
            console.warn('Azure Request failed', response.status, response.statusText);
            return res.sendStatus(500);
        }

        const voices = await response.json();
        return res.json(voices);
    } catch (error) {
        console.error('Azure Request failed', error);
        return res.sendStatus(500);
    }
});

router.post('/generate', async (req, res) => {
    try {
        const key = readSecret(req.user.directories, SECRET_KEYS.AZURE_TTS);

        if (!key) {
            console.warn('Azure TTS API Key not set');
            return res.sendStatus(403);
        }

        const { text, voice, region } = req.body;
        if (!text || !voice || !region) {
            console.warn('Missing required parameters');
            return res.sendStatus(400);
        }

        const url = `https://${region}.tts.speech.microsoft.com/cognitiveservices/v1`;
        const lang = String(voice).split('-').slice(0, 2).join('-');
        // XML-escape the text before embedding it into the SSML payload
        const escapedText = String(text).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
        const ssml = `<speak version='1.0' xmlns='http://www.w3.org/2001/10/synthesis' xml:lang='${lang}'><voice xml:lang='${lang}' name='${voice}'>${escapedText}</voice></speak>`;

        const response = await fetch(url, {
            method: 'POST',
            headers: {
                'Ocp-Apim-Subscription-Key': key,
                'Content-Type': 'application/ssml+xml',
                'X-Microsoft-OutputFormat': 'webm-24khz-16bit-mono-opus',
            },
            body: ssml,
        });

        if (!response.ok) {
            console.warn('Azure Request failed', response.status, response.statusText);
            return res.sendStatus(500);
        }

        const audio = Buffer.from(await response.arrayBuffer());
        res.set('Content-Type', 'audio/ogg');
        return res.send(audio);
    } catch (error) {
        console.error('Azure Request failed', error);
        return res.sendStatus(500);
    }
});
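A hedged sketch of calling the /generate handler above to synthesize speech. The mount path (/api/azure), the voice name, and the region are assumptions; the handler derives xml:lang (e.g. 'en-US') from the voice name itself.

// Hypothetical usage sketch: synthesize speech and save the returned audio to disk.
import fs from 'node:fs';
import fetch from 'node-fetch';

const response = await fetch('http://127.0.0.1:8000/api/azure/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        text: 'Hello from Azure TTS.',
        voice: 'en-US-JennyNeural', // assumed voice name
        region: 'eastus',           // assumed Azure region
    }),
});

fs.writeFileSync('speech.ogg', Buffer.from(await response.arrayBuffer()));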
2622
web-app/src/endpoints/backends/chat-completions.js
Normal file
File diff suppressed because it is too large
281
web-app/src/endpoints/backends/kobold.js
Normal file
@@ -0,0 +1,281 @@
import fs from 'node:fs';
import express from 'express';
import fetch from 'node-fetch';

import { forwardFetchResponse, delay } from '../../util.js';
import { getOverrideHeaders, setAdditionalHeaders, setAdditionalHeadersByType } from '../../additional-headers.js';
import { TEXTGEN_TYPES } from '../../constants.js';

export const router = express.Router();

router.post('/generate', async function (request, response_generate) {
    if (!request.body) return response_generate.sendStatus(400);

    if (request.body.api_server.indexOf('localhost') != -1) {
        request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
    }

    const request_prompt = request.body.prompt;
    const controller = new AbortController();
    request.socket.removeAllListeners('close');
    request.socket.on('close', async function () {
        if (request.body.can_abort && !response_generate.writableEnded) {
            try {
                console.info('Aborting Kobold generation...');
                // send abort signal to koboldcpp
                const abortResponse = await fetch(`${request.body.api_server}/extra/abort`, {
                    method: 'POST',
                });

                if (!abortResponse.ok) {
                    console.error('Error sending abort request to Kobold:', abortResponse.status);
                }
            } catch (error) {
                console.error(error);
            }
        }
        controller.abort();
    });

    let this_settings = {
        prompt: request_prompt,
        use_story: false,
        use_memory: false,
        use_authors_note: false,
        use_world_info: false,
        max_context_length: request.body.max_context_length,
        max_length: request.body.max_length,
    };

    if (!request.body.gui_settings) {
        this_settings = {
            prompt: request_prompt,
            use_story: false,
            use_memory: false,
            use_authors_note: false,
            use_world_info: false,
            max_context_length: request.body.max_context_length,
            max_length: request.body.max_length,
            rep_pen: request.body.rep_pen,
            rep_pen_range: request.body.rep_pen_range,
            rep_pen_slope: request.body.rep_pen_slope,
            temperature: request.body.temperature,
            tfs: request.body.tfs,
            top_a: request.body.top_a,
            top_k: request.body.top_k,
            top_p: request.body.top_p,
            min_p: request.body.min_p,
            typical: request.body.typical,
            sampler_order: request.body.sampler_order,
            singleline: !!request.body.singleline,
            use_default_badwordsids: request.body.use_default_badwordsids,
            mirostat: request.body.mirostat,
            mirostat_eta: request.body.mirostat_eta,
            mirostat_tau: request.body.mirostat_tau,
            grammar: request.body.grammar,
            sampler_seed: request.body.sampler_seed,
        };
        if (request.body.stop_sequence) {
            this_settings['stop_sequence'] = request.body.stop_sequence;
        }
    }

    console.debug(this_settings);
    const args = {
        body: JSON.stringify(this_settings),
        headers: Object.assign(
            { 'Content-Type': 'application/json' },
            getOverrideHeaders((new URL(request.body.api_server))?.host),
        ),
        signal: controller.signal,
    };

    const MAX_RETRIES = 50;
    const delayAmount = 2500;
    for (let i = 0; i < MAX_RETRIES; i++) {
        try {
            const url = request.body.streaming ? `${request.body.api_server}/extra/generate/stream` : `${request.body.api_server}/v1/generate`;
            const response = await fetch(url, { method: 'POST', ...args });

            if (request.body.streaming) {
                // Pipe remote SSE stream to Express response
                forwardFetchResponse(response, response_generate);
                return;
            } else {
                if (!response.ok) {
                    const errorText = await response.text();
                    console.warn(`Kobold returned error: ${response.status} ${response.statusText} ${errorText}`);

                    try {
                        const errorJson = JSON.parse(errorText);
                        const message = errorJson?.detail?.msg || errorText;
                        return response_generate.status(400).send({ error: { message } });
                    } catch {
                        return response_generate.status(400).send({ error: { message: errorText } });
                    }
                }

                const data = await response.json();
                console.debug('Endpoint response:', data);
                return response_generate.send(data);
            }
        } catch (error) {
            switch (error?.status) {
                case 403:
                case 503: // retry in case of temporary service issue, possibly caused by a queue failure?
                    console.warn(`KoboldAI is busy. Retry attempt ${i + 1} of ${MAX_RETRIES}...`);
                    await delay(delayAmount);
                    break;
                default:
                    if ('status' in error) {
                        console.error('Status Code from Kobold:', error.status);
                    }
                    return response_generate.send({ error: true });
            }
        }
    }

    console.error('Max retries exceeded. Giving up.');
    return response_generate.send({ error: true });
});

router.post('/status', async function (request, response) {
    if (!request.body) return response.sendStatus(400);
    let api_server = request.body.api_server;
    if (api_server.indexOf('localhost') != -1) {
        api_server = api_server.replace('localhost', '127.0.0.1');
    }

    const args = {
        headers: { 'Content-Type': 'application/json' },
    };

    setAdditionalHeaders(request, args, api_server);

    const result = {};

    /** @type {any} */
    const [koboldUnitedResponse, koboldExtraResponse, koboldModelResponse] = await Promise.all([
        // We catch errors both from the response not having a successful HTTP status and from JSON parsing failing

        // Kobold United API version
        fetch(`${api_server}/v1/info/version`).then(response => {
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => ({ result: '0.0.0' })),

        // KoboldCpp version
        fetch(`${api_server}/extra/version`).then(response => {
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => ({ version: '0.0' })),

        // Current model
        fetch(`${api_server}/v1/model`).then(response => {
            if (!response.ok) throw new Error(`Kobold API error: ${response.status} ${response.statusText}`);
            return response.json();
        }).catch(() => null),
    ]);

    result.koboldUnitedVersion = koboldUnitedResponse.result;
    result.koboldCppVersion = koboldExtraResponse.version;
    result.model = !koboldModelResponse || koboldModelResponse.result === 'ReadOnly' ?
        'no_connection' :
        koboldModelResponse.result;

    response.send(result);
});

router.post('/transcribe-audio', async function (request, response) {
    try {
        const server = request.body.server;

        if (!server) {
            console.error('Server is not set');
            return response.sendStatus(400);
        }

        if (!request.file) {
            console.error('No audio file found');
            return response.sendStatus(400);
        }

        console.debug('Transcribing audio with KoboldCpp', server);

        const fileBase64 = fs.readFileSync(request.file.path).toString('base64');
        fs.unlinkSync(request.file.path);

        const headers = {};
        setAdditionalHeadersByType(headers, TEXTGEN_TYPES.KOBOLDCPP, server, request.user.directories);

        const url = new URL(server);
        url.pathname = '/api/extra/transcribe';

        const result = await fetch(url, {
            method: 'POST',
            headers: {
                ...headers,
            },
            body: JSON.stringify({
                prompt: '',
                audio_data: fileBase64,
            }),
        });

        if (!result.ok) {
            const text = await result.text();
            console.error('KoboldCpp request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('KoboldCpp transcription response', data);
        return response.json(data);
    } catch (error) {
        console.error('KoboldCpp transcription failed', error);
        response.status(500).send('Internal server error');
    }
});

router.post('/embed', async function (request, response) {
    try {
        const { server, items } = request.body;

        if (!server) {
            console.warn('KoboldCpp URL is not set');
            return response.sendStatus(400);
        }

        const headers = {};
        setAdditionalHeadersByType(headers, TEXTGEN_TYPES.KOBOLDCPP, server, request.user.directories);

        const embeddingsUrl = new URL(server);
        embeddingsUrl.pathname = '/api/extra/embeddings';

        const embeddingsResult = await fetch(embeddingsUrl, {
            method: 'POST',
            headers: {
                ...headers,
            },
            body: JSON.stringify({
                input: items,
            }),
        });

        /** @type {any} */
        const data = await embeddingsResult.json();

        if (!Array.isArray(data?.data)) {
            console.warn('KoboldCpp API response was not an array');
            return response.sendStatus(500);
        }

        const model = data.model || 'unknown';
        const embeddings = data.data.map(x => Array.isArray(x) ? x[0] : x).sort((a, b) => a.index - b.index).map(x => x.embedding);
        return response.json({ model, embeddings });
    } catch (error) {
        console.error('KoboldCpp embedding failed', error);
        response.status(500).send('Internal server error');
    }
});
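A minimal sketch of calling the /embed handler above against a local KoboldCpp instance. The mount path (/api/backends/kobold) and the KoboldCpp port are assumptions not shown in this diff.

// Hypothetical usage sketch: embed two strings through a local KoboldCpp backend.
import fetch from 'node-fetch';

const response = await fetch('http://127.0.0.1:8000/api/backends/kobold/embed', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        server: 'http://127.0.0.1:5001', // assumed KoboldCpp address
        items: ['first passage', 'second passage'],
    }),
});

const { model, embeddings } = await response.json();
console.log(model, embeddings.length); // one embedding vector per input item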
643
web-app/src/endpoints/backends/text-completions.js
Normal file
@@ -0,0 +1,643 @@
import { Readable } from 'node:stream';
import fetch from 'node-fetch';
import express from 'express';
import _ from 'lodash';

import {
    TEXTGEN_TYPES,
    TOGETHERAI_KEYS,
    OLLAMA_KEYS,
    INFERMATICAI_KEYS,
    OPENROUTER_KEYS,
    VLLM_KEYS,
    FEATHERLESS_KEYS,
    OPENAI_KEYS,
} from '../../constants.js';
import { forwardFetchResponse, trimV1, getConfigValue } from '../../util.js';
import { setAdditionalHeaders } from '../../additional-headers.js';
import { createHash } from 'node:crypto';

export const router = express.Router();

/**
 * Special boy's streaming routine. Wrap this abomination into a proper SSE stream.
 * @param {import('node-fetch').Response} jsonStream JSON stream
 * @param {import('express').Request} request Express request
 * @param {import('express').Response} response Express response
 * @returns {Promise<any>} Nothing valuable
 */
async function parseOllamaStream(jsonStream, request, response) {
    try {
        if (!jsonStream.body) {
            throw new Error('No body in the response');
        }

        let partialData = '';
        jsonStream.body.on('data', (data) => {
            const chunk = data.toString();
            partialData += chunk;
            while (true) {
                let json;
                try {
                    json = JSON.parse(partialData);
                } catch (e) {
                    break;
                }
                const text = json.response || '';
                const thinking = json.thinking || '';
                const sseChunk = { choices: [{ text, thinking }] };
                response.write(`data: ${JSON.stringify(sseChunk)}\n\n`);
                partialData = '';
            }
        });

        request.socket.on('close', function () {
            if (jsonStream.body instanceof Readable) jsonStream.body.destroy();
            response.end();
        });

        jsonStream.body.on('end', () => {
            console.info('Streaming request finished');
            response.write('data: [DONE]\n\n');
            response.end();
        });
    } catch (error) {
        console.error('Error forwarding streaming response:', error);
        if (!response.headersSent) {
            return response.status(500).send({ error: true });
        } else {
            return response.end();
        }
    }
}

/**
 * Abort a KoboldCpp generation request.
 * @param {import('express').Request} request the generation request
 * @param {string} url Server base URL
 * @returns {Promise<void>} Promise resolving when we are done
 */
async function abortKoboldCppRequest(request, url) {
    try {
        console.info('Aborting Kobold generation...');
        const args = {
            method: 'POST',
            headers: {},
        };

        setAdditionalHeaders(request, args, url);
        const abortResponse = await fetch(`${url}/api/extra/abort`, args);

        if (!abortResponse.ok) {
            console.error('Error sending abort request to Kobold:', abortResponse.status, abortResponse.statusText);
        }
    } catch (error) {
        console.error(error);
    }
}

//************** Ooba/OpenAI text completions API
router.post('/status', async function (request, response) {
    if (!request.body) return response.sendStatus(400);

    try {
        if (request.body.api_server.indexOf('localhost') !== -1) {
            request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
        }

        console.debug('Trying to connect to API', request.body);
        const baseUrl = trimV1(request.body.api_server);

        const args = {
            headers: { 'Content-Type': 'application/json' },
        };

        setAdditionalHeaders(request, args, baseUrl);

        const apiType = request.body.api_type;
        let url = baseUrl;
        let result = '';

        switch (apiType) {
            case TEXTGEN_TYPES.GENERIC:
            case TEXTGEN_TYPES.OOBA:
            case TEXTGEN_TYPES.VLLM:
            case TEXTGEN_TYPES.APHRODITE:
            case TEXTGEN_TYPES.KOBOLDCPP:
            case TEXTGEN_TYPES.LLAMACPP:
            case TEXTGEN_TYPES.INFERMATICAI:
            case TEXTGEN_TYPES.OPENROUTER:
            case TEXTGEN_TYPES.FEATHERLESS:
                url += '/v1/models';
                break;
            case TEXTGEN_TYPES.DREAMGEN:
                url += '/api/openai/v1/models';
                break;
            case TEXTGEN_TYPES.MANCER:
                url += '/oai/v1/models';
                break;
            case TEXTGEN_TYPES.TABBY:
                url += '/v1/model/list';
                break;
            case TEXTGEN_TYPES.TOGETHERAI:
                url += '/api/models?&info';
                break;
            case TEXTGEN_TYPES.OLLAMA:
                url += '/api/tags';
                break;
            case TEXTGEN_TYPES.HUGGINGFACE:
                url += '/info';
                break;
        }

        const modelsReply = await fetch(url, args);
        const isPossiblyLmStudio = modelsReply.headers.get('x-powered-by') === 'Express';

        if (!modelsReply.ok) {
            console.error('Models endpoint is offline.');
            return response.sendStatus(400);
        }

        /** @type {any} */
        let data = await modelsReply.json();

        // Rewrap to OAI-like response
        if (apiType === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
            data = { data: data.map(x => ({ id: x.name, ...x })) };
        }

        if (apiType === TEXTGEN_TYPES.OLLAMA && Array.isArray(data.models)) {
            data = { data: data.models.map(x => ({ id: x.name, ...x })) };
        }

        if (apiType === TEXTGEN_TYPES.HUGGINGFACE) {
            data = { data: [] };
        }

        if (!Array.isArray(data.data)) {
            console.error('Models response is not an array.');
            return response.sendStatus(400);
        }

        const modelIds = data.data.map(x => x.id);
        console.info('Models available:', modelIds);

        // Set result to the first model ID
        result = modelIds[0] || 'Valid';

        if (apiType === TEXTGEN_TYPES.OOBA && !isPossiblyLmStudio) {
            try {
                const modelInfoUrl = baseUrl + '/v1/internal/model/info';
                const modelInfoReply = await fetch(modelInfoUrl, args);

                if (modelInfoReply.ok) {
                    /** @type {any} */
                    const modelInfo = await modelInfoReply.json();
                    console.debug('Ooba model info:', modelInfo);

                    const modelName = modelInfo?.model_name;
                    result = modelName || result;
                    response.setHeader('x-supports-tokenization', 'true');
                }
            } catch (error) {
                console.error(`Failed to get Ooba model info: ${error}`);
            }
        } else if (apiType === TEXTGEN_TYPES.TABBY) {
            try {
                const modelInfoUrl = baseUrl + '/v1/model';
                const modelInfoReply = await fetch(modelInfoUrl, args);

                if (modelInfoReply.ok) {
                    /** @type {any} */
                    const modelInfo = await modelInfoReply.json();
                    console.debug('Tabby model info:', modelInfo);

                    const modelName = modelInfo?.id;
                    result = modelName || result;
                } else {
                    // TabbyAPI returns an error 400 if a model isn't loaded
                    result = 'None';
                }
            } catch (error) {
                console.error(`Failed to get TabbyAPI model info: ${error}`);
            }
        }

        return response.send({ result, data: data.data });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/props', async function (request, response) {
    if (!request.body.api_server) return response.sendStatus(400);

    try {
        const baseUrl = trimV1(request.body.api_server);
        const args = {
            headers: {},
        };

        setAdditionalHeaders(request, args, baseUrl);

        const apiType = request.body.api_type;
        let propsUrl = baseUrl + '/props';
        if (apiType === TEXTGEN_TYPES.LLAMACPP && request.body.model) {
            propsUrl += `?model=${encodeURIComponent(request.body.model)}`;
            console.debug(`Querying llama-server props with model parameter: ${request.body.model}`);
        }
        const propsReply = await fetch(propsUrl, args);

        if (!propsReply.ok) {
            return response.sendStatus(400);
        }

        /** @type {any} */
        const props = await propsReply.json();
        // TEMPORARY: llama.cpp's /props endpoint has a bug which replaces the last newline with a \0
        if (apiType === TEXTGEN_TYPES.LLAMACPP && props['chat_template'] && props['chat_template'].endsWith('\u0000')) {
            props['chat_template'] = props['chat_template'].slice(0, -1) + '\n';
        }
        props['chat_template_hash'] = createHash('sha256').update(props['chat_template']).digest('hex');
        console.debug(`Model properties: ${JSON.stringify(props)}`);
        return response.send(props);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/generate', async function (request, response) {
    if (!request.body) return response.sendStatus(400);

    try {
        if (request.body.api_server.indexOf('localhost') !== -1) {
            request.body.api_server = request.body.api_server.replace('localhost', '127.0.0.1');
        }

        const apiType = request.body.api_type;
        const baseUrl = request.body.api_server;
        console.debug(request.body);

        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', async function () {
            if (request.body.api_type === TEXTGEN_TYPES.KOBOLDCPP && !response.writableEnded) {
                await abortKoboldCppRequest(request, trimV1(baseUrl));
            }

            controller.abort();
        });

        let url = trimV1(baseUrl);

        switch (request.body.api_type) {
            case TEXTGEN_TYPES.GENERIC:
            case TEXTGEN_TYPES.VLLM:
            case TEXTGEN_TYPES.FEATHERLESS:
            case TEXTGEN_TYPES.APHRODITE:
            case TEXTGEN_TYPES.OOBA:
            case TEXTGEN_TYPES.TABBY:
            case TEXTGEN_TYPES.KOBOLDCPP:
            case TEXTGEN_TYPES.TOGETHERAI:
            case TEXTGEN_TYPES.INFERMATICAI:
            case TEXTGEN_TYPES.HUGGINGFACE:
                url += '/v1/completions';
                break;
            case TEXTGEN_TYPES.DREAMGEN:
                url += '/api/openai/v1/completions';
                break;
            case TEXTGEN_TYPES.MANCER:
                url += '/oai/v1/completions';
                break;
            case TEXTGEN_TYPES.LLAMACPP:
                url += '/completion';
                break;
            case TEXTGEN_TYPES.OLLAMA:
                url += '/api/generate';
                break;
            case TEXTGEN_TYPES.OPENROUTER:
                url += '/v1/chat/completions';
                break;
        }

        const args = {
            method: 'POST',
            body: JSON.stringify(request.body),
            headers: { 'Content-Type': 'application/json' },
            signal: controller.signal,
            timeout: 0,
        };

        setAdditionalHeaders(request, args, baseUrl);

        if (request.body.api_type === TEXTGEN_TYPES.TOGETHERAI) {
            request.body = _.pickBy(request.body, (_, key) => TOGETHERAI_KEYS.includes(key));
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.INFERMATICAI) {
            request.body = _.pickBy(request.body, (_, key) => INFERMATICAI_KEYS.includes(key));
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.FEATHERLESS) {
            request.body = _.pickBy(request.body, (_, key) => FEATHERLESS_KEYS.includes(key));
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.DREAMGEN) {
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.GENERIC) {
            request.body = _.pickBy(request.body, (_, key) => OPENAI_KEYS.includes(key));
            if (Array.isArray(request.body.stop)) { request.body.stop = request.body.stop.slice(0, 4); }
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.OPENROUTER) {
            if (Array.isArray(request.body.provider) && request.body.provider.length > 0) {
                request.body.provider = {
                    allow_fallbacks: request.body.allow_fallbacks ?? true,
                    order: request.body.provider,
                };
            } else {
                delete request.body.provider;
            }
            request.body = _.pickBy(request.body, (_, key) => OPENROUTER_KEYS.includes(key));
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.VLLM) {
            request.body = _.pickBy(request.body, (_, key) => VLLM_KEYS.includes(key));
            args.body = JSON.stringify(request.body);
        }

        if (request.body.api_type === TEXTGEN_TYPES.OLLAMA) {
            const keepAlive = Number(getConfigValue('ollama.keepAlive', -1, 'number'));
            const numBatch = Number(getConfigValue('ollama.batchSize', -1, 'number'));
            if (numBatch > 0) {
                request.body['num_batch'] = numBatch;
            }
            args.body = JSON.stringify({
                model: request.body.model,
                prompt: request.body.prompt,
                stream: request.body.stream ?? false,
                keep_alive: keepAlive,
                raw: true,
                options: _.pickBy(request.body, (_, key) => OLLAMA_KEYS.includes(key)),
            });
        }

        if (request.body.api_type === TEXTGEN_TYPES.OLLAMA && request.body.stream) {
            const stream = await fetch(url, args);
            parseOllamaStream(stream, request, response);
        } else if (request.body.stream) {
            const completionsStream = await fetch(url, args);
            // Pipe remote SSE stream to Express response
            forwardFetchResponse(completionsStream, response);
        }
        else {
            const completionsReply = await fetch(url, args);

            if (completionsReply.ok) {
                /** @type {any} */
                const data = await completionsReply.json();
                console.debug('Endpoint response:', data);

                // Map InfermaticAI response to OAI completions format
                if (apiType === TEXTGEN_TYPES.INFERMATICAI) {
                    data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text, logprobs: choice?.logprobs, index: choice?.index }));
                }

                return response.send(data);
            } else {
                const text = await completionsReply.text();
                const errorBody = { error: true, status: completionsReply.status, response: text };

                return !response.headersSent
                    ? response.send(errorBody)
                    : response.end();
            }
        }
    } catch (error) {
        const status = error?.status ?? error?.code ?? 'UNKNOWN';
        const text = error?.error ?? error?.statusText ?? error?.message ?? 'Unknown error on /generate endpoint';
        let value = { error: true, status: status, response: text };
        console.error('Endpoint error:', error);

        return !response.headersSent
            ? response.send(value)
            : response.end();
    }
});

const ollama = express.Router();

ollama.post('/download', async function (request, response) {
    try {
        if (!request.body.name || !request.body.api_server) return response.sendStatus(400);

        const name = request.body.name;
        const url = String(request.body.api_server).replace(/\/$/, '');
        console.debug('Pulling Ollama model:', name);

        const fetchResponse = await fetch(`${url}/api/pull`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                name: name,
                stream: false,
            }),
        });

        if (!fetchResponse.ok) {
            console.error('Download error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        console.debug('Ollama pull response:', await fetchResponse.json());
        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

ollama.post('/caption-image', async function (request, response) {
    try {
        if (!request.body.server_url || !request.body.model) {
            return response.sendStatus(400);
        }

        console.debug('Ollama caption request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/api/generate`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                model: request.body.model,
                prompt: request.body.prompt,
                images: [request.body.image],
                stream: false,
            }),
        });

        if (!fetchResponse.ok) {
            const errorText = await fetchResponse.text();
            console.error('Ollama caption error:', fetchResponse.status, fetchResponse.statusText, errorText);
            return response.status(500).send({ error: true });
        }

        /** @type {any} */
        const data = await fetchResponse.json();
        console.debug('Ollama caption response:', data);

        const caption = data?.response || '';

        if (!caption) {
            console.error('Ollama caption is empty.');
            return response.status(500).send({ error: true });
        }

        return response.send({ caption });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

const llamacpp = express.Router();

llamacpp.post('/props', async function (request, response) {
    try {
        if (!request.body.server_url) {
            return response.sendStatus(400);
        }

        console.debug('LlamaCpp props request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        const fetchResponse = await fetch(`${baseUrl}/props`, {
            method: 'GET',
        });

        if (!fetchResponse.ok) {
            console.error('LlamaCpp props error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.debug('LlamaCpp props response:', data);

        return response.send(data);

    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

llamacpp.post('/slots', async function (request, response) {
    try {
        if (!request.body.server_url) {
            return response.sendStatus(400);
        }
        if (!/^(erase|info|restore|save)$/.test(request.body.action)) {
            return response.sendStatus(400);
        }

        console.debug('LlamaCpp slots request:', request.body);
        const baseUrl = trimV1(request.body.server_url);

        let fetchResponse;
        if (request.body.action === 'info') {
            fetchResponse = await fetch(`${baseUrl}/slots`, {
                method: 'GET',
            });
        } else {
            if (!/^\d+$/.test(request.body.id_slot)) {
                return response.sendStatus(400);
            }
            if (request.body.action !== 'erase' && !request.body.filename) {
                return response.sendStatus(400);
            }

            fetchResponse = await fetch(`${baseUrl}/slots/${request.body.id_slot}?action=${request.body.action}`, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({
                    filename: request.body.action !== 'erase' ? `${request.body.filename}` : undefined,
                }),
            });
        }

        if (!fetchResponse.ok) {
            console.error('LlamaCpp slots error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const data = await fetchResponse.json();
        console.debug('LlamaCpp slots response:', data);

        return response.send(data);

    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

const tabby = express.Router();

tabby.post('/download', async function (request, response) {
    try {
        const baseUrl = String(request.body.api_server).replace(/\/$/, '');

        const args = {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(request.body),
            timeout: 0,
        };

        setAdditionalHeaders(request, args, baseUrl);

        // Check key permissions
        const permissionResponse = await fetch(`${baseUrl}/v1/auth/permission`, {
            headers: args.headers,
        });

        if (permissionResponse.ok) {
            /** @type {any} */
            const permissionJson = await permissionResponse.json();

            if (permissionJson['permission'] !== 'admin') {
                return response.status(403).send({ error: true });
            }
        } else {
            console.error('API Permission error:', permissionResponse.status, permissionResponse.statusText);
            return response.status(500).send({ error: true });
        }

        const fetchResponse = await fetch(`${baseUrl}/v1/download`, args);

        if (!fetchResponse.ok) {
            console.error('Download error:', fetchResponse.status, fetchResponse.statusText);
            return response.status(500).send({ error: true });
        }

        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.use('/ollama', ollama);
router.use('/llamacpp', llamacpp);
router.use('/tabby', tabby);
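A hedged sketch of consuming the SSE stream that the /generate handler above forwards. The mount path (/api/backends/text-completions) and the 'koboldcpp' api_type string are assumptions (the TEXTGEN_TYPES values live in constants.js, which is not shown in this diff); a robust client would also buffer partial SSE lines across chunks.

// Hypothetical usage sketch: stream a completion and print text chunks as they arrive.
import fetch from 'node-fetch';

const response = await fetch('http://127.0.0.1:8000/api/backends/text-completions/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        api_type: 'koboldcpp',              // assumed TEXTGEN_TYPES value
        api_server: 'http://127.0.0.1:5001', // assumed backend address
        prompt: 'Once upon a time',
        max_length: 64,
        stream: true,
    }),
});

// The endpoint forwards SSE: lines of `data: {...}` terminated by `data: [DONE]`.
for await (const chunk of response.body) {
    for (const line of chunk.toString().split('\n')) {
        if (!line.startsWith('data: ') || line === 'data: [DONE]') continue;
        const { choices } = JSON.parse(line.slice(6)); // simplified: assumes whole events per chunk
        process.stdout.write(choices?.[0]?.text ?? '');
    }
}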
76
web-app/src/endpoints/backgrounds.js
Normal file
@@ -0,0 +1,76 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';

import { dimensions, invalidateThumbnail } from './thumbnails.js';
import { getImages } from '../util.js';
import { getFileNameValidationFunction } from '../middleware/validateFileName.js';

export const router = express.Router();

router.post('/all', function (request, response) {
    const images = getImages(request.user.directories.backgrounds);
    const config = { width: dimensions.bg[0], height: dimensions.bg[1] };
    response.json({ images, config });
});

router.post('/delete', getFileNameValidationFunction('bg'), function (request, response) {
    if (!request.body) return response.sendStatus(400);

    if (request.body.bg !== sanitize(request.body.bg)) {
        console.error('Malicious bg name prevented');
        return response.sendStatus(403);
    }

    const fileName = path.join(request.user.directories.backgrounds, sanitize(request.body.bg));

    if (!fs.existsSync(fileName)) {
        console.error('BG file not found');
        return response.sendStatus(400);
    }

    fs.unlinkSync(fileName);
    invalidateThumbnail(request.user.directories, 'bg', request.body.bg);
    return response.send('ok');
});

router.post('/rename', function (request, response) {
    if (!request.body) return response.sendStatus(400);

    const oldFileName = path.join(request.user.directories.backgrounds, sanitize(request.body.old_bg));
    const newFileName = path.join(request.user.directories.backgrounds, sanitize(request.body.new_bg));

    if (!fs.existsSync(oldFileName)) {
        console.error('BG file not found');
        return response.sendStatus(400);
    }

    if (fs.existsSync(newFileName)) {
        console.error('New BG file already exists');
        return response.sendStatus(400);
    }

    fs.copyFileSync(oldFileName, newFileName);
    fs.unlinkSync(oldFileName);
    invalidateThumbnail(request.user.directories, 'bg', request.body.old_bg);
    return response.send('ok');
});

router.post('/upload', function (request, response) {
    if (!request.body || !request.file) return response.sendStatus(400);

    const img_path = path.join(request.file.destination, request.file.filename);
    const filename = request.file.originalname;

    try {
        fs.copyFileSync(img_path, path.join(request.user.directories.backgrounds, filename));
        fs.unlinkSync(img_path);
        invalidateThumbnail(request.user.directories, 'bg', filename);
        response.send(filename);
    } catch (err) {
        console.error(err);
        response.sendStatus(500);
    }
});
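A minimal client sketch of the rename flow above (the '/api/backgrounds' mount path is an assumption for illustration):

// Hypothetical usage: rename a background, then refresh the list.
await fetch('/api/backgrounds/rename', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ old_bg: 'beach.png', new_bg: 'beach-day.png' }),
});
const { images, config } = await fetch('/api/backgrounds/all', { method: 'POST' }).then(r => r.json());
console.log(`${images.length} backgrounds, thumbnails at ${config.width}x${config.height}`);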
75
web-app/src/endpoints/backups.js
Normal file
@@ -0,0 +1,75 @@
import express from 'express';
import fs, { promises as fsPromises } from 'node:fs';
import path from 'node:path';
import sanitize from 'sanitize-filename';
import { CHAT_BACKUPS_PREFIX, getChatInfo } from './chats.js';

export const router = express.Router();

router.post('/chat/get', async (request, response) => {
    try {
        const backupModels = [];
        const backupFiles = await fsPromises
            .readdir(request.user.directories.backups, { withFileTypes: true })
            .then(entries => entries
                .filter(entry => entry.isFile() && path.extname(entry.name) === '.jsonl' && entry.name.startsWith(CHAT_BACKUPS_PREFIX))
                .map(entry => entry.name));

        for (const name of backupFiles) {
            const filePath = path.join(request.user.directories.backups, name);
            const info = await getChatInfo(filePath);
            if (!info || !info.file_name) {
                continue;
            }
            backupModels.push(info);
        }

        return response.json(backupModels);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/chat/delete', async (request, response) => {
    try {
        const { name } = request.body;
        const filePath = path.join(request.user.directories.backups, sanitize(name));

        if (!path.parse(filePath).base.startsWith(CHAT_BACKUPS_PREFIX)) {
            console.warn('Attempt to delete non-chat backup file:', name);
            return response.sendStatus(400);
        }

        if (!fs.existsSync(filePath)) {
            return response.sendStatus(404);
        }

        await fsPromises.unlink(filePath);
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/chat/download', async (request, response) => {
    try {
        const { name } = request.body;
        const filePath = path.join(request.user.directories.backups, sanitize(name));

        if (!path.parse(filePath).base.startsWith(CHAT_BACKUPS_PREFIX)) {
            console.warn('Attempt to download non-chat backup file:', name);
            return response.sendStatus(400);
        }

        if (!fs.existsSync(filePath)) {
            return response.sendStatus(404);
        }

        return response.download(filePath);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
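For illustration, how a client could enumerate and fetch chat backups via the routes above (the mount path, and the assumption that the reported file_name is the on-disk backup name, are not confirmed by this diff):

// Hypothetical usage sketch.
const backups = await fetch('/api/backups/chat/get', { method: 'POST' }).then(r => r.json());
if (backups.length > 0) {
    // Download the first backup; the server streams the .jsonl file via response.download().
    const file = await fetch('/api/backups/chat/download', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: backups[0].file_name }),
    });
}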
29
web-app/src/endpoints/caption.js
Normal file
@@ -0,0 +1,29 @@
import express from 'express';
import { getPipeline, getRawImage } from '../transformers.js';

export const router = express.Router();

const TASK = 'image-to-text';

router.post('/', async (req, res) => {
    try {
        const { image } = req.body;

        const rawImage = await getRawImage(image);

        if (!rawImage) {
            console.warn('Failed to parse captioned image');
            return res.sendStatus(400);
        }

        const pipe = await getPipeline(TASK);
        const result = await pipe(rawImage);
        const text = result[0].generated_text;
        console.info('Image caption:', text);

        return res.json({ caption: text });
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});
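A hedged usage sketch for the captioning endpoint (mount path and image encoding are assumptions; getRawImage's accepted formats are not shown in this diff):

// Hypothetical: send an image (e.g. a data URI or base64 string) for captioning.
const { caption } = await fetch('/api/extra/caption', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ image: someBase64Image }),
}).then(r => r.json());
console.log('Caption:', caption);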
1547
web-app/src/endpoints/characters.js
Normal file
File diff suppressed because it is too large
1020
web-app/src/endpoints/chats.js
Normal file
File diff suppressed because it is too large
55
web-app/src/endpoints/classify.js
Normal file
@@ -0,0 +1,55 @@
import express from 'express';

import { getPipeline } from '../transformers.js';

const TASK = 'text-classification';

export const router = express.Router();

/**
 * @type {Map<string, object>} Cache for classification results
 */
const cacheObject = new Map();

router.post('/labels', async (req, res) => {
    try {
        const pipe = await getPipeline(TASK);
        const result = Object.keys(pipe.model.config.label2id);
        return res.json({ labels: result });
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.post('/', async (req, res) => {
    try {
        const { text } = req.body;

        /**
         * Get classification result for a given text
         * @param {string} text Text to classify
         * @returns {Promise<object>} Classification result
         */
        async function getResult(text) {
            if (cacheObject.has(text)) {
                return cacheObject.get(text);
            } else {
                const pipe = await getPipeline(TASK);
                const result = await pipe(text, { topk: 5 });
                result.sort((a, b) => b.score - a.score);
                cacheObject.set(text, result);
                return result;
            }
        }

        console.debug('Classify input:', text);
        const result = await getResult(text);
        console.debug('Classify output:', result);

        return res.json({ classification: result });
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});
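A usage sketch for the classifier (mount path assumed). Note the in-memory Map cache above: repeated requests for identical text skip the pipeline entirely, which matters because model inference dominates latency here:

// Hypothetical client call; the response is the top-5 labels sorted by score.
const { classification } = await fetch('/api/extra/classify', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ text: 'I am so happy today!' }),
}).then(r => r.json());
console.log(classification[0]); // e.g. { label: 'joy', score: 0.98 } (illustrative values)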
1045
web-app/src/endpoints/content-manager.js
Normal file
File diff suppressed because it is too large
816
web-app/src/endpoints/data-maid.js
Normal file
@@ -0,0 +1,816 @@
import crypto from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import mime from 'mime-types';

import { getSettingsBackupFilePrefix } from './settings.js';
import { CHAT_BACKUPS_PREFIX } from './chats.js';
import { isPathUnderParent, tryParse } from '../util.js';
import { SETTINGS_FILE } from '../constants.js';

const sha256 = str => crypto.createHash('sha256').update(str).digest('hex');

/**
 * @typedef {object} DataMaidRawReport
 * @property {string[]} images - List of loose user images
 * @property {string[]} files - List of loose user files
 * @property {string[]} chats - List of loose character chats
 * @property {string[]} groupChats - List of loose group chats
 * @property {string[]} avatarThumbnails - List of loose avatar thumbnails
 * @property {string[]} backgroundThumbnails - List of loose background thumbnails
 * @property {string[]} personaThumbnails - List of loose persona thumbnails
 * @property {string[]} chatBackups - List of chat backups
 * @property {string[]} settingsBackups - List of settings backups
 */

/**
 * @typedef {object} DataMaidSanitizedRecord - The entry excluding the sensitive paths.
 * @property {string} name - The name of the file.
 * @property {string} hash - The SHA-256 hash of the file path.
 * @property {string} [parent] - The name of the parent directory, if applicable.
 * @property {number} [size] - The size of the file in bytes, if available.
 * @property {number} [mtime] - The last modification time of the file, if available.
 */

/**
 * @typedef {object} DataMaidSanitizedReport - The report containing loose user data.
 * @property {DataMaidSanitizedRecord[]} images - List of sanitized loose user images
 * @property {DataMaidSanitizedRecord[]} files - List of sanitized loose user files
 * @property {DataMaidSanitizedRecord[]} chats - List of sanitized loose character chats
 * @property {DataMaidSanitizedRecord[]} groupChats - List of sanitized loose group chats
 * @property {DataMaidSanitizedRecord[]} avatarThumbnails - List of sanitized loose avatar thumbnails
 * @property {DataMaidSanitizedRecord[]} backgroundThumbnails - List of sanitized loose background thumbnails
 * @property {DataMaidSanitizedRecord[]} personaThumbnails - List of sanitized loose persona thumbnails
 * @property {DataMaidSanitizedRecord[]} chatBackups - List of sanitized chat backups
 * @property {DataMaidSanitizedRecord[]} settingsBackups - List of sanitized settings backups
 */

/**
 * @typedef {object} DataMaidMessage - The chat message object.
 * @property {DataMaidMessageExtra} [extra] - The extra data object.
 * @property {DataMaidChatMetadata} [chat_metadata] - The chat metadata object.
 */

/**
 * @typedef {object} DataMaidFile - The file object.
 * @property {string} url - The file URL
 */

/**
 * @typedef {object} DataMaidMedia - The media object.
 * @property {string} url - The media URL
 */

/**
 * @typedef {object} DataMaidChatMetadata - The chat metadata object.
 * @property {DataMaidFile[]} [attachments] - The array of attachments, if any.
 * @property {string[]} [chat_backgrounds] - The array of chat background image links, if any.
 */

/**
 * @typedef {object} DataMaidMessageExtra - The extra data object.
 * @property {string} [image] - The link to the image, if any - DEPRECATED, use `media` instead.
 * @property {string} [video] - The link to the video, if any - DEPRECATED, use `media` instead.
 * @property {string[]} [image_swipes] - The links to the image swipes, if any - DEPRECATED, use `media` instead.
 * @property {DataMaidMedia[]} [media] - The links to the media, if any.
 * @property {DataMaidFile} [file] - The file object, if any - DEPRECATED, use `files` instead.
 * @property {DataMaidFile[]} [files] - The array of file objects, if any.
 */

/**
 * @typedef {object} DataMaidTokenEntry
 * @property {string} handle - The user's handle or identifier.
 * @property {{path: string, hash: string}[]} paths - The list of file paths and their hashes that can be cleaned up.
 */

/**
 * Service for detecting and managing loose user data files.
 * Helps identify orphaned files that are no longer referenced by the application.
 */
export class DataMaidService {
    /**
     * @type {Map<string, DataMaidTokenEntry>} Map clean-up tokens to user IDs
     */
    static TOKENS = new Map();

    /**
     * Creates a new DataMaidService instance for a specific user.
     * @param {string} handle - The user's handle.
     * @param {import('../users.js').UserDirectoryList} directories - List of user directories to scan for loose data.
     */
    constructor(handle, directories) {
        this.handle = handle;
        this.directories = directories;
    }

    /**
     * Generates a report of loose user data.
     * @returns {Promise<DataMaidRawReport>} A report containing lists of loose user data.
     */
    async generateReport() {
        /** @type {DataMaidRawReport} */
        const report = {
            images: await this.#collectImages(),
            files: await this.#collectFiles(),
            chats: await this.#collectChats(),
            groupChats: await this.#collectGroupChats(),
            avatarThumbnails: await this.#collectAvatarThumbnails(),
            backgroundThumbnails: await this.#collectBackgroundThumbnails(),
            personaThumbnails: await this.#collectPersonaThumbnails(),
            chatBackups: await this.#collectChatBackups(),
            settingsBackups: await this.#collectSettingsBackups(),
        };

        return report;
    }
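    // Illustrative example (hypothetical paths, not part of the original commit): a raw report
    // is plain absolute paths, e.g.
    //   { images: ['/data/default-user/user/images/loose.png'], files: [], chats: [], ... }
    // Raw reports stay server-side; sanitizeReport() below converts each path into a
    // hash-addressed record ({ name, hash, size, mtime }) that is safe to send to the client.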
    /**
     * Sanitizes a record by hashing the file name and removing sensitive information.
     * Additionally, adds metadata like size and modification time.
     * @param {string} name The file or directory name to sanitize.
     * @param {boolean} withParent Whether the record should include the parent directory name.
     * @returns {Promise<DataMaidSanitizedRecord>} A sanitized record with the file name, hash, parent directory name, size, and modification time.
     */
    async #sanitizeRecord(name, withParent) {
        const stat = fs.existsSync(name) ? await fs.promises.stat(name) : null;
        return {
            name: path.basename(name),
            hash: sha256(name),
            parent: withParent ? path.basename(path.dirname(name)) : void 0,
            size: stat?.size,
            mtime: stat?.mtimeMs,
        };
    }

    /**
     * Sanitizes the report by hashing the file paths and removing sensitive information.
     * @param {DataMaidRawReport} report - The raw report containing loose user data.
     * @returns {Promise<DataMaidSanitizedReport>} A sanitized report with sensitive paths removed.
     */
    async sanitizeReport(report) {
        const sanitizedReport = {
            images: await Promise.all(report.images.map(i => this.#sanitizeRecord(i, true))),
            files: await Promise.all(report.files.map(i => this.#sanitizeRecord(i, false))),
            chats: await Promise.all(report.chats.map(i => this.#sanitizeRecord(i, true))),
            groupChats: await Promise.all(report.groupChats.map(i => this.#sanitizeRecord(i, false))),
            avatarThumbnails: await Promise.all(report.avatarThumbnails.map(i => this.#sanitizeRecord(i, false))),
            backgroundThumbnails: await Promise.all(report.backgroundThumbnails.map(i => this.#sanitizeRecord(i, false))),
            personaThumbnails: await Promise.all(report.personaThumbnails.map(i => this.#sanitizeRecord(i, false))),
            chatBackups: await Promise.all(report.chatBackups.map(i => this.#sanitizeRecord(i, false))),
            settingsBackups: await Promise.all(report.settingsBackups.map(i => this.#sanitizeRecord(i, false))),
        };

        return sanitizedReport;
    }

    /**
     * Collects loose user images from the provided directories.
     * Images are considered loose if they exist in the user images directory
     * but are not referenced in any chat messages or chat metadata.
     * @returns {Promise<string[]>} List of paths to loose user images
     */
    async #collectImages() {
        const result = [];

        try {
            const messages = await this.#parseAllChats(x => !!x?.extra?.image || !!x?.extra?.video || Array.isArray(x?.extra?.image_swipes) || Array.isArray(x?.extra?.media));
            const knownImages = new Set();
            for (const message of messages) {
                if (message?.extra?.image) {
                    knownImages.add(message.extra.image);
                }
                if (message?.extra?.video) {
                    knownImages.add(message.extra.video);
                }
                if (Array.isArray(message?.extra?.image_swipes)) {
                    for (const swipe of message.extra.image_swipes) {
                        knownImages.add(swipe);
                    }
                }
                if (Array.isArray(message?.extra?.media)) {
                    for (const media of message.extra.media) {
                        if (media?.url) {
                            knownImages.add(media.url);
                        }
                    }
                }
            }
            const metadata = await this.#parseAllMetadata(x => Array.isArray(x?.chat_backgrounds) && x.chat_backgrounds.length > 0);
            for (const meta of metadata) {
                if (Array.isArray(meta?.chat_backgrounds)) {
                    for (const background of meta.chat_backgrounds) {
                        if (background) {
                            knownImages.add(background);
                        }
                    }
                }
            }
            const knownImageFullPaths = new Set();
            knownImages.forEach(image => {
                if (image.startsWith('http') || image.startsWith('data:')) {
                    return; // Skip URLs and data URIs
                }
                knownImageFullPaths.add(path.normalize(path.join(this.directories.root, image)));
            });
            const images = await fs.promises.readdir(this.directories.userImages, { withFileTypes: true });
            for (const dirent of images) {
                const direntPath = path.join(dirent.parentPath, dirent.name);
                if (dirent.isFile() && !knownImageFullPaths.has(direntPath)) {
                    result.push(direntPath);
                }
                if (dirent.isDirectory()) {
                    const subdirFiles = await fs.promises.readdir(direntPath, { withFileTypes: true });
                    for (const file of subdirFiles) {
                        const subdirFilePath = path.join(direntPath, file.name);
                        if (file.isFile() && !knownImageFullPaths.has(subdirFilePath)) {
                            result.push(subdirFilePath);
                        }
                    }
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting user images:', error);
        }

        return result;
    }

    /**
     * Collects loose user files from the provided directories.
     * Files are considered loose if they exist in the files directory
     * but are not referenced in chat messages, metadata, or settings.
     * @returns {Promise<string[]>} List of paths to loose user files
     */
    async #collectFiles() {
        const result = [];

        try {
            const messages = await this.#parseAllChats(x => !!x?.extra?.file?.url || (Array.isArray(x?.extra?.files) && x.extra.files.length > 0));
            const knownFiles = new Set();
            for (const message of messages) {
                if (message?.extra?.file?.url) {
                    knownFiles.add(message.extra.file.url);
                }
                if (Array.isArray(message?.extra?.files)) {
                    for (const file of message.extra.files) {
                        if (file?.url) {
                            knownFiles.add(file.url);
                        }
                    }
                }
            }
            const metadata = await this.#parseAllMetadata(x => Array.isArray(x?.attachments) && x.attachments.length > 0);
            for (const meta of metadata) {
                if (Array.isArray(meta?.attachments)) {
                    for (const attachment of meta.attachments) {
                        if (attachment?.url) {
                            knownFiles.add(attachment.url);
                        }
                    }
                }
            }
            const pathToSettings = path.join(this.directories.root, SETTINGS_FILE);
            if (fs.existsSync(pathToSettings)) {
                try {
                    const settingsContent = await fs.promises.readFile(pathToSettings, 'utf-8');
                    const settings = tryParse(settingsContent);
                    if (Array.isArray(settings?.extension_settings?.attachments)) {
                        for (const file of settings.extension_settings.attachments) {
                            if (file?.url) {
                                knownFiles.add(file.url);
                            }
                        }
                    }
                    if (typeof settings?.extension_settings?.character_attachments === 'object') {
                        for (const files of Object.values(settings.extension_settings.character_attachments)) {
                            if (!Array.isArray(files)) {
                                continue;
                            }
                            for (const file of files) {
                                if (file?.url) {
                                    knownFiles.add(file.url);
                                }
                            }
                        }
                    }
                } catch (error) {
                    console.error('[Data Maid] Error reading settings file:', error);
                }
            }
            const knownFileFullPaths = new Set();
            knownFiles.forEach(file => {
                knownFileFullPaths.add(path.normalize(path.join(this.directories.root, file)));
            });
            const files = await fs.promises.readdir(this.directories.files, { withFileTypes: true });
            for (const file of files) {
                const filePath = path.join(this.directories.files, file.name);
                if (file.isFile() && !knownFileFullPaths.has(filePath)) {
                    result.push(filePath);
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting user files:', error);
        }

        return result;
    }

    /**
     * Collects loose character chats from the provided directories.
     * Chat folders are considered loose if they don't have corresponding character files.
     * @returns {Promise<string[]>} List of paths to loose character chats
     */
    async #collectChats() {
        const result = [];

        try {
            const knownChatFolders = new Set();
            const characters = await fs.promises.readdir(this.directories.characters, { withFileTypes: true });
            for (const file of characters) {
                if (file.isFile() && path.parse(file.name).ext === '.png') {
                    knownChatFolders.add(file.name.replace('.png', ''));
                }
            }
            const chatFolders = await fs.promises.readdir(this.directories.chats, { withFileTypes: true });
            for (const folder of chatFolders) {
                if (folder.isDirectory() && !knownChatFolders.has(folder.name)) {
                    const chatFiles = await fs.promises.readdir(path.join(this.directories.chats, folder.name), { withFileTypes: true });
                    for (const file of chatFiles) {
                        if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                            result.push(path.join(this.directories.chats, folder.name, file.name));
                        }
                    }
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting character chats:', error);
        }

        return result;
    }

    /**
     * Collects loose group chats from the provided directories.
     * Group chat files are considered loose if they're not referenced by any group definition.
     * @returns {Promise<string[]>} List of paths to loose group chats
     */
    async #collectGroupChats() {
        const result = [];

        try {
            const groups = await fs.promises.readdir(this.directories.groups, { withFileTypes: true });
            const knownGroupChats = new Set();
            for (const file of groups) {
                if (file.isFile() && path.parse(file.name).ext === '.json') {
                    try {
                        const pathToFile = path.join(this.directories.groups, file.name);
                        const fileContent = await fs.promises.readFile(pathToFile, 'utf-8');
                        const groupData = tryParse(fileContent);
                        if (groupData?.chat_id) {
                            knownGroupChats.add(groupData.chat_id);
                        }
                        if (Array.isArray(groupData?.chats)) {
                            for (const chat of groupData.chats) {
                                knownGroupChats.add(chat);
                            }
                        }
                    } catch (error) {
                        console.error(`[Data Maid] Error parsing group chat file ${file.name}:`, error);
                    }
                }
            }
            const groupChats = await fs.promises.readdir(this.directories.groupChats, { withFileTypes: true });
            for (const file of groupChats) {
                if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                    if (!knownGroupChats.has(path.parse(file.name).name)) {
                        result.push(path.join(this.directories.groupChats, file.name));
                    }
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting group chats:', error);
        }

        return result;
    }

    /**
     * Collects loose avatar thumbnails from the provided directories.
     * @returns {Promise<string[]>} List of paths to loose avatar thumbnails
     */
    async #collectAvatarThumbnails() {
        const result = [];

        try {
            const knownAvatars = new Set();
            const avatars = await fs.promises.readdir(this.directories.characters, { withFileTypes: true });
            for (const file of avatars) {
                if (file.isFile()) {
                    knownAvatars.add(file.name);
                }
            }
            const avatarThumbnails = await fs.promises.readdir(this.directories.thumbnailsAvatar, { withFileTypes: true });
            for (const file of avatarThumbnails) {
                if (file.isFile() && !knownAvatars.has(file.name)) {
                    result.push(path.join(this.directories.thumbnailsAvatar, file.name));
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting avatar thumbnails:', error);
        }

        return result;
    }

    /**
     * Collects loose background thumbnails from the provided directories.
     * @returns {Promise<string[]>} List of paths to loose background thumbnails
     */
    async #collectBackgroundThumbnails() {
        const result = [];

        try {
            const knownBackgrounds = new Set();
            const backgrounds = await fs.promises.readdir(this.directories.backgrounds, { withFileTypes: true });
            for (const file of backgrounds) {
                if (file.isFile()) {
                    knownBackgrounds.add(file.name);
                }
            }
            const backgroundThumbnails = await fs.promises.readdir(this.directories.thumbnailsBg, { withFileTypes: true });
            for (const file of backgroundThumbnails) {
                if (file.isFile() && !knownBackgrounds.has(file.name)) {
                    result.push(path.join(this.directories.thumbnailsBg, file.name));
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting background thumbnails:', error);
        }

        return result;
    }

    /**
     * Collects loose persona thumbnails from the provided directories.
     * @returns {Promise<string[]>} List of paths to loose persona thumbnails
     */
    async #collectPersonaThumbnails() {
        const result = [];

        try {
            const knownPersonas = new Set();
            const personas = await fs.promises.readdir(this.directories.avatars, { withFileTypes: true });
            for (const file of personas) {
                if (file.isFile()) {
                    knownPersonas.add(file.name);
                }
            }
            const personaThumbnails = await fs.promises.readdir(this.directories.thumbnailsPersona, { withFileTypes: true });
            for (const file of personaThumbnails) {
                if (file.isFile() && !knownPersonas.has(file.name)) {
                    result.push(path.join(this.directories.thumbnailsPersona, file.name));
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting persona thumbnails:', error);
        }

        return result;
    }

    /**
     * Collects chat backups from the provided directories.
     * @returns {Promise<string[]>} List of paths to chat backups
     */
    async #collectChatBackups() {
        const result = [];

        try {
            const prefix = CHAT_BACKUPS_PREFIX;
            const backups = await fs.promises.readdir(this.directories.backups, { withFileTypes: true });
            for (const file of backups) {
                if (file.isFile() && file.name.startsWith(prefix)) {
                    result.push(path.join(this.directories.backups, file.name));
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting chat backups:', error);
        }

        return result;
    }

    /**
     * Collects settings backups from the provided directories.
     * @returns {Promise<string[]>} List of paths to settings backups
     */
    async #collectSettingsBackups() {
        const result = [];

        try {
            const prefix = getSettingsBackupFilePrefix(this.handle);
            const backups = await fs.promises.readdir(this.directories.backups, { withFileTypes: true });
            for (const file of backups) {
                if (file.isFile() && file.name.startsWith(prefix)) {
                    result.push(path.join(this.directories.backups, file.name));
                }
            }
        } catch (error) {
            console.error('[Data Maid] Error collecting settings backups:', error);
        }

        return result;
    }

    /**
     * Parses all chat files and returns an array of chat messages.
     * Searches both individual character chats and group chats.
     * @param {function(DataMaidMessage): boolean} filterFn - Filter function to apply to each message.
     * @returns {Promise<DataMaidMessage[]>} Array of chat messages
     */
    async #parseAllChats(filterFn) {
        try {
            const allChats = [];

            const groupChats = await fs.promises.readdir(this.directories.groupChats, { withFileTypes: true });
            for (const file of groupChats) {
                if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                    const chatMessages = await this.#parseChatFile(path.join(this.directories.groupChats, file.name));
                    allChats.push(...chatMessages.filter(filterFn));
                }
            }

            const chatDirectories = await fs.promises.readdir(this.directories.chats, { withFileTypes: true });
            for (const directory of chatDirectories) {
                if (directory.isDirectory()) {
                    const chatFiles = await fs.promises.readdir(path.join(this.directories.chats, directory.name), { withFileTypes: true });
                    for (const file of chatFiles) {
                        if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                            const chatMessages = await this.#parseChatFile(path.join(this.directories.chats, directory.name, file.name));
                            allChats.push(...chatMessages.filter(filterFn));
                        }
                    }
                }
            }

            return allChats;
        } catch (error) {
            console.error('[Data Maid] Error parsing chats:', error);
            return [];
        }
    }

    /**
     * Parses all metadata from chat files and group definitions.
     * Extracts metadata from both active and historical chat data.
     * @param {function(DataMaidChatMetadata): boolean} filterFn - Filter function to apply to each metadata entry.
     * @returns {Promise<DataMaidChatMetadata[]>} Parsed chat metadata as an array.
     */
    async #parseAllMetadata(filterFn) {
        try {
            const allMetadata = [];

            const groups = await fs.promises.readdir(this.directories.groups, { withFileTypes: true });
            for (const file of groups) {
                if (file.isFile() && path.parse(file.name).ext === '.json') {
                    try {
                        const pathToFile = path.join(this.directories.groups, file.name);
                        const fileContent = await fs.promises.readFile(pathToFile, 'utf-8');
                        const groupData = tryParse(fileContent);
                        if (groupData?.chat_metadata && filterFn(groupData.chat_metadata)) {
                            console.warn('Found group chat metadata in group definition - this is deprecated behavior.');
                            allMetadata.push(groupData.chat_metadata);
                        }
                        if (groupData?.past_metadata) {
                            console.warn('Found group past chat metadata in group definition - this is deprecated behavior.');
                            allMetadata.push(...Object.values(groupData.past_metadata).filter(filterFn));
                        }
                    } catch (error) {
                        console.error(`[Data Maid] Error parsing group chat file ${file.name}:`, error);
                    }
                }
            }

            const groupChats = await fs.promises.readdir(this.directories.groupChats, { withFileTypes: true });
            for (const file of groupChats) {
                if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                    const chatMessages = await this.#parseChatFile(path.join(this.directories.groupChats, file.name));
                    const chatMetadata = chatMessages?.[0]?.chat_metadata;
                    if (chatMetadata && filterFn(chatMetadata)) {
                        allMetadata.push(chatMetadata);
                    }
                }
            }

            const chatDirectories = await fs.promises.readdir(this.directories.chats, { withFileTypes: true });
            for (const directory of chatDirectories) {
                if (directory.isDirectory()) {
                    const chatFiles = await fs.promises.readdir(path.join(this.directories.chats, directory.name), { withFileTypes: true });
                    for (const file of chatFiles) {
                        if (file.isFile() && path.parse(file.name).ext === '.jsonl') {
                            const chatMessages = await this.#parseChatFile(path.join(this.directories.chats, directory.name, file.name));
                            const chatMetadata = chatMessages?.[0]?.chat_metadata;
                            if (chatMetadata && filterFn(chatMetadata)) {
                                allMetadata.push(chatMetadata);
                            }
                        }
                    }
                }
            }

            return allMetadata;
        } catch (error) {
            console.error('[Data Maid] Error parsing chat metadata:', error);
            return [];
        }
    }

    /**
     * Parses a single chat file and returns an array of chat messages.
     * Each line in the JSONL file represents one message.
     * @param {string} filePath Path to the chat file to parse.
     * @returns {Promise<DataMaidMessage[]>} Parsed chat messages as an array.
     */
    async #parseChatFile(filePath) {
        try {
            const content = await fs.promises.readFile(filePath, 'utf-8');
            const chatData = content.split('\n').map(tryParse).filter(Boolean);
            return chatData;
        } catch (error) {
            console.error(`[Data Maid] Error reading chat file ${filePath}:`, error);
            return [];
        }
    }

    /**
     * Generates a unique token for the user to clean up their data.
     * Replaces any existing token for the same user.
     * @param {string} handle - The user's handle or identifier.
     * @param {DataMaidRawReport} report - The report containing loose user data.
     * @returns {string} A unique token.
     */
    static generateToken(handle, report) {
        // Remove any existing token for this user
        for (const [token, entry] of this.TOKENS.entries()) {
            if (entry.handle === handle) {
                this.TOKENS.delete(token);
            }
        }

        const token = crypto.randomBytes(32).toString('hex');
        const tokenEntry = {
            handle,
            paths: Object.values(report).filter(v => Array.isArray(v)).flat().map(x => ({ path: x, hash: sha256(x) })),
        };
        this.TOKENS.set(token, tokenEntry);
        return token;
    }
}
export const router = express.Router();

router.post('/report', async (req, res) => {
    try {
        if (!req.user || !req.user.directories) {
            return res.sendStatus(403);
        }

        const dataMaid = new DataMaidService(req.user.profile.handle, req.user.directories);
        const rawReport = await dataMaid.generateReport();

        const report = await dataMaid.sanitizeReport(rawReport);
        const token = DataMaidService.generateToken(req.user.profile.handle, rawReport);

        return res.json({ report, token });
    } catch (error) {
        console.error('[Data Maid] Error generating data maid report:', error);
        return res.sendStatus(500);
    }
});

router.post('/finalize', async (req, res) => {
    try {
        if (!req.user || !req.user.directories) {
            return res.sendStatus(403);
        }

        if (!req.body.token) {
            return res.sendStatus(400);
        }

        const token = req.body.token.toString();
        if (!DataMaidService.TOKENS.has(token)) {
            return res.sendStatus(403);
        }

        const tokenEntry = DataMaidService.TOKENS.get(token);
        if (!tokenEntry || tokenEntry.handle !== req.user.profile.handle) {
            return res.sendStatus(403);
        }

        // Remove the token after finalization
        DataMaidService.TOKENS.delete(token);
        return res.sendStatus(204);
    } catch (error) {
        console.error('[Data Maid] Error finalizing the token:', error);
        return res.sendStatus(500);
    }
});

router.get('/view', async (req, res) => {
    try {
        if (!req.user || !req.user.directories) {
            return res.sendStatus(403);
        }

        if (!req.query.token || !req.query.hash) {
            return res.sendStatus(400);
        }

        const token = req.query.token.toString();
        const hash = req.query.hash.toString();

        if (!DataMaidService.TOKENS.has(token)) {
            return res.sendStatus(403);
        }

        const tokenEntry = DataMaidService.TOKENS.get(token);
        if (!tokenEntry || tokenEntry.handle !== req.user.profile.handle) {
            return res.sendStatus(403);
        }

        const fileEntry = tokenEntry.paths.find(entry => entry.hash === hash);
        if (!fileEntry) {
            return res.sendStatus(404);
        }

        if (!isPathUnderParent(req.user.directories.root, fileEntry.path)) {
            console.warn('[Data Maid] Attempted access to a file outside of the user directory:', fileEntry.path);
            return res.sendStatus(403);
        }

        const pathToFile = fileEntry.path;
        const fileExists = fs.existsSync(pathToFile);

        if (!fileExists) {
            return res.sendStatus(404);
        }

        const fileBuffer = await fs.promises.readFile(pathToFile);
        const mimeType = mime.lookup(pathToFile) || 'text/plain';
        res.setHeader('Content-Type', mimeType);
        return res.send(fileBuffer);
    } catch (error) {
        console.error('[Data Maid] Error viewing file:', error);
        return res.sendStatus(500);
    }
});

router.post('/delete', async (req, res) => {
    try {
        if (!req.user || !req.user.directories) {
            return res.sendStatus(403);
        }

        const { token, hashes } = req.body;
        if (!token || !Array.isArray(hashes) || hashes.length === 0) {
            return res.sendStatus(400);
        }

        if (!DataMaidService.TOKENS.has(token)) {
            return res.sendStatus(403);
        }

        const tokenEntry = DataMaidService.TOKENS.get(token);
        if (!tokenEntry || tokenEntry.handle !== req.user.profile.handle) {
            return res.sendStatus(403);
        }

        for (const hash of hashes) {
            const fileEntry = tokenEntry.paths.find(entry => entry.hash === hash);
            if (!fileEntry) {
                continue;
            }

            if (!isPathUnderParent(req.user.directories.root, fileEntry.path)) {
                console.warn('[Data Maid] Attempted deletion of a file outside of the user directory:', fileEntry.path);
                continue;
            }

            const pathToFile = fileEntry.path;
            const fileExists = fs.existsSync(pathToFile);

            if (!fileExists) {
                continue;
            }

            await fs.promises.unlink(pathToFile);
        }

        return res.sendStatus(204);
    } catch (error) {
        console.error('[Data Maid] Error deleting files:', error);
        return res.sendStatus(500);
    }
});
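Putting the four routes together, a cleanup client follows a report, view/delete, finalize sequence. A hedged sketch (the '/api/data-maid' mount path is an assumption for illustration):

// 1. Request a report; the server returns sanitized records plus a one-time token.
const { report, token } = await fetch('/api/data-maid/report', { method: 'POST' }).then(r => r.json());
// 2. Preview a candidate file by its path hash, then delete a batch of hashes.
const preview = await fetch(`/api/data-maid/view?token=${token}&hash=${report.images[0].hash}`);
await fetch('/api/data-maid/delete', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token, hashes: report.images.map(i => i.hash) }),
});
// 3. Invalidate the token when done.
await fetch('/api/data-maid/finalize', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token }),
});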
455
web-app/src/endpoints/extensions.js
Normal file
@@ -0,0 +1,455 @@
import path from 'node:path';
import fs from 'node:fs';

import express from 'express';
import sanitize from 'sanitize-filename';
import { CheckRepoActions, default as simpleGit } from 'simple-git';

import { PUBLIC_DIRECTORIES } from '../constants.js';

/**
 * @type {Partial<import('simple-git').SimpleGitOptions>}
 */
const OPTIONS = Object.freeze({ timeout: { block: 5 * 60 * 1000 } });

/**
 * Extracts the extension information from the manifest file.
 * @param {string} extensionPath - The path of the extension folder
 * @returns {Promise<Object>} - Returns the manifest data as an object
 */
async function getManifest(extensionPath) {
    const manifestPath = path.join(extensionPath, 'manifest.json');

    // Check if manifest.json exists
    if (!fs.existsSync(manifestPath)) {
        throw new Error(`Manifest file not found at ${manifestPath}`);
    }

    const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
    return manifest;
}

/**
 * Checks if the local repository is up-to-date with the remote repository.
 * @param {string} extensionPath - The path of the extension folder
 * @returns {Promise<Object>} - Returns the up-to-date status and the remote URL as an object
 */
async function checkIfRepoIsUpToDate(extensionPath) {
    const git = simpleGit({ baseDir: extensionPath, ...OPTIONS });
    await git.fetch('origin');
    const currentBranch = await git.branch();
    const currentCommitHash = await git.revparse(['HEAD']);
    const log = await git.log({
        from: currentCommitHash,
        to: `origin/${currentBranch.current}`,
    });

    // Fetch remote repository information
    const remotes = await git.getRemotes(true);
    if (remotes.length === 0) {
        return {
            isUpToDate: true,
            remoteUrl: '',
        };
    }

    return {
        isUpToDate: log.total === 0,
        remoteUrl: remotes[0].refs.fetch, // URL of the remote repository
    };
}
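// For reference, an illustrative result shape (values hypothetical):
//   await checkIfRepoIsUpToDate('/path/to/extension')
//   => { isUpToDate: false, remoteUrl: 'https://github.com/user/extension.git' }
// isUpToDate is true when no commits exist between HEAD and origin/<current branch>,
// or when the repository has no remotes at all.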
export const router = express.Router();
|
||||
|
||||
/**
|
||||
* HTTP POST handler function to clone a git repository from a provided URL, read the extension manifest,
|
||||
* and return extension information and path.
|
||||
*
|
||||
* @param {Object} request - HTTP Request object, expects a JSON body with a 'url' property.
|
||||
* @param {Object} response - HTTP Response object used to respond to the HTTP request.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
router.post('/install', async (request, response) => {
|
||||
if (!request.body.url) {
|
||||
return response.status(400).send('Bad Request: URL is required in the request body.');
|
||||
}
|
||||
|
||||
try {
|
||||
// No timeout for cloning, as it may take a while depending on the repo size
|
||||
const git = simpleGit();
|
||||
|
||||
// make sure the third-party directory exists
|
||||
if (!fs.existsSync(path.join(request.user.directories.extensions))) {
|
||||
fs.mkdirSync(path.join(request.user.directories.extensions));
|
||||
}
|
||||
|
||||
if (!fs.existsSync(PUBLIC_DIRECTORIES.globalExtensions)) {
|
||||
fs.mkdirSync(PUBLIC_DIRECTORIES.globalExtensions);
|
||||
}
|
||||
|
||||
const { url, global, branch } = request.body;
|
||||
|
||||
if (global && !request.user.profile.admin) {
|
||||
console.error(`User ${request.user.profile.handle} does not have permission to install global extensions.`);
|
||||
return response.status(403).send('Forbidden: No permission to install global extensions.');
|
||||
}
|
||||
|
||||
const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const extensionPath = path.join(basePath, sanitize(path.basename(url, '.git')));
|
||||
|
||||
if (fs.existsSync(extensionPath)) {
|
||||
return response.status(409).send(`Directory already exists at ${extensionPath}`);
|
||||
}
|
||||
|
||||
const cloneOptions = { '--depth': 1 };
|
||||
if (branch) {
|
||||
cloneOptions['--branch'] = branch;
|
||||
}
|
||||
await git.clone(url, extensionPath, cloneOptions);
|
||||
console.info(`Extension has been cloned to ${extensionPath} from ${url} at ${branch || '(default)'} branch`);
|
||||
|
||||
const { version, author, display_name } = await getManifest(extensionPath);
|
||||
|
||||
return response.send({ version, author, display_name, extensionPath });
|
||||
} catch (error) {
|
||||
console.error('Importing custom content failed', error);
|
||||
return response.status(500).send(`Server Error: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* HTTP POST handler function to pull the latest updates from a git repository
|
||||
* based on the extension name provided in the request body. It returns the latest commit hash,
|
||||
* the path of the extension, the status of the repository (whether it's up-to-date or not),
|
||||
* and the remote URL of the repository.
|
||||
*
|
||||
* @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
|
||||
* @param {Object} response - HTTP Response object used to respond to the HTTP request.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
router.post('/update', async (request, response) => {
|
||||
if (!request.body.extensionName) {
|
||||
return response.status(400).send('Bad Request: extensionName is required in the request body.');
|
||||
}
|
||||
|
||||
try {
|
||||
const { extensionName, global } = request.body;
|
||||
|
||||
if (global && !request.user.profile.admin) {
|
||||
console.error(`User ${request.user.profile.handle} does not have permission to update global extensions.`);
|
||||
return response.status(403).send('Forbidden: No permission to update global extensions.');
|
||||
}
|
||||
|
||||
const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const extensionPath = path.join(basePath, sanitize(extensionName));
|
||||
|
||||
if (!fs.existsSync(extensionPath)) {
|
||||
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
|
||||
}
|
||||
|
||||
const { isUpToDate, remoteUrl } = await checkIfRepoIsUpToDate(extensionPath);
|
||||
const git = simpleGit({ baseDir: extensionPath, ...OPTIONS });
|
||||
const isRepo = await git.checkIsRepo(CheckRepoActions.IS_REPO_ROOT);
|
||||
if (!isRepo) {
|
||||
throw new Error(`Directory is not a Git repository at ${extensionPath}`);
|
||||
}
|
||||
const currentBranch = await git.branch();
|
||||
if (!isUpToDate) {
|
||||
await git.pull('origin', currentBranch.current);
|
||||
console.info(`Extension has been updated at ${extensionPath}`);
|
||||
} else {
|
||||
console.info(`Extension is up to date at ${extensionPath}`);
|
||||
}
|
||||
await git.fetch('origin');
|
||||
const fullCommitHash = await git.revparse(['HEAD']);
|
||||
const shortCommitHash = fullCommitHash.slice(0, 7);
|
||||
|
||||
return response.send({ shortCommitHash, extensionPath, isUpToDate, remoteUrl });
|
||||
} catch (error) {
|
||||
console.error('Updating extension failed', error);
|
||||
return response.status(500).send('Internal Server Error. Check the server logs for more details.');
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/branches', async (request, response) => {
|
||||
try {
|
||||
const { extensionName, global } = request.body;
|
||||
|
||||
if (!extensionName) {
|
||||
return response.status(400).send('Bad Request: extensionName is required in the request body.');
|
||||
}
|
||||
|
||||
if (global && !request.user.profile.admin) {
|
||||
console.error(`User ${request.user.profile.handle} does not have permission to list branches of global extensions.`);
|
||||
return response.status(403).send('Forbidden: No permission to list branches of global extensions.');
|
||||
}
|
||||
|
||||
const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const extensionPath = path.join(basePath, sanitize(extensionName));
|
||||
|
||||
if (!fs.existsSync(extensionPath)) {
|
||||
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
|
||||
}
|
||||
|
||||
const git = simpleGit({ baseDir: extensionPath, ...OPTIONS });
|
||||
// Unshallow the repository if it is shallow
|
||||
const isShallow = await git.revparse(['--is-shallow-repository']) === 'true';
|
||||
if (isShallow) {
|
||||
console.info(`Unshallowing the repository at ${extensionPath}`);
|
||||
await git.fetch('origin', ['--unshallow']);
|
||||
}
|
||||
|
||||
// Fetch all branches
|
||||
await git.remote(['set-branches', 'origin', '*']);
|
||||
await git.fetch('origin');
|
||||
const localBranches = await git.branchLocal();
|
||||
const remoteBranches = await git.branch(['-r', '--list', 'origin/*']);
|
||||
const result = [
|
||||
...Object.values(localBranches.branches),
|
||||
...Object.values(remoteBranches.branches),
|
||||
].map(b => ({ current: b.current, commit: b.commit, name: b.name, label: b.label }));
|
||||
|
||||
return response.send(result);
|
||||
} catch (error) {
|
||||
console.error('Getting branches failed', error);
|
||||
return response.status(500).send('Internal Server Error. Check the server logs for more details.');
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/switch', async (request, response) => {
|
||||
try {
|
||||
const { extensionName, branch, global } = request.body;
|
||||
|
||||
if (!extensionName || !branch) {
|
||||
return response.status(400).send('Bad Request: extensionName and branch are required in the request body.');
|
||||
}
|
||||
|
||||
if (global && !request.user.profile.admin) {
|
||||
console.error(`User ${request.user.profile.handle} does not have permission to switch branches of global extensions.`);
|
||||
return response.status(403).send('Forbidden: No permission to switch branches of global extensions.');
|
||||
}
|
||||
|
||||
const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const extensionPath = path.join(basePath, sanitize(extensionName));
|
||||
|
||||
if (!fs.existsSync(extensionPath)) {
|
||||
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
|
||||
}
|
||||
|
||||
const git = simpleGit({ baseDir: extensionPath, ...OPTIONS });
|
||||
const branches = await git.branchLocal();
|
||||
|
||||
if (String(branch).startsWith('origin/')) {
|
||||
const localBranch = branch.replace('origin/', '');
|
||||
if (branches.all.includes(localBranch)) {
|
||||
console.info(`Branch ${localBranch} already exists locally, checking it out`);
|
||||
await git.checkout(localBranch);
|
||||
return response.sendStatus(204);
|
||||
}
|
||||
|
||||
console.info(`Branch ${localBranch} does not exist locally, creating it from ${branch}`);
|
||||
await git.checkoutBranch(localBranch, branch);
|
||||
return response.sendStatus(204);
|
||||
}
|
||||
|
||||
if (!branches.all.includes(branch)) {
|
||||
console.error(`Branch ${branch} does not exist locally`);
|
||||
return response.status(404).send(`Branch ${branch} does not exist locally`);
|
||||
}
|
||||
|
||||
// Check if the branch is already checked out
|
||||
const currentBranch = await git.branch();
|
||||
if (currentBranch.current === branch) {
|
||||
console.info(`Branch ${branch} is already checked out`);
|
||||
return response.sendStatus(204);
|
||||
}
|
||||
|
||||
// Checkout the branch
|
||||
await git.checkout(branch);
|
||||
console.info(`Checked out branch ${branch} at ${extensionPath}`);
|
||||
|
||||
return response.sendStatus(204);
|
||||
} catch (error) {
|
||||
console.error('Switching branches failed', error);
|
||||
return response.status(500).send('Internal Server Error. Check the server logs for more details.');
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/move', async (request, response) => {
|
||||
try {
|
||||
const { extensionName, source, destination } = request.body;
|
||||
|
||||
if (!extensionName || !source || !destination) {
|
||||
return response.status(400).send('Bad Request. Not all required parameters are provided.');
|
||||
}
|
||||
|
||||
if (!request.user.profile.admin) {
|
||||
console.error(`User ${request.user.profile.handle} does not have permission to move extensions.`);
|
||||
return response.status(403).send('Forbidden: No permission to move extensions.');
|
||||
}
|
||||
|
||||
const sourceDirectory = source === 'global' ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const destinationDirectory = destination === 'global' ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const sourcePath = path.join(sourceDirectory, sanitize(extensionName));
|
||||
const destinationPath = path.join(destinationDirectory, sanitize(extensionName));
|
||||
|
||||
if (!fs.existsSync(sourcePath) || !fs.statSync(sourcePath).isDirectory()) {
|
||||
console.error(`Source directory does not exist at ${sourcePath}`);
|
||||
return response.status(404).send('Source directory does not exist.');
|
||||
}
|
||||
|
||||
if (fs.existsSync(destinationPath)) {
|
||||
console.error(`Destination directory already exists at ${destinationPath}`);
|
||||
return response.status(409).send('Destination directory already exists.');
|
||||
}
|
||||
|
||||
if (source === destination) {
|
||||
console.error('Source and destination directories are the same');
|
||||
return response.status(409).send('Source and destination directories are the same.');
|
||||
}
|
||||
|
||||
fs.cpSync(sourcePath, destinationPath, { recursive: true, force: true });
|
||||
fs.rmSync(sourcePath, { recursive: true, force: true });
|
||||
console.info(`Extension has been moved from ${sourcePath} to ${destinationPath}`);
|
||||
|
||||
return response.sendStatus(204);
|
||||
} catch (error) {
|
||||
console.error('Moving extension failed', error);
|
||||
return response.status(500).send('Internal Server Error. Check the server logs for more details.');
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* HTTP POST handler function to get the current git commit hash and branch name for a given extension.
|
||||
* It checks whether the repository is up-to-date with the remote, and returns the status along with
|
||||
* the remote URL of the repository.
|
||||
*
|
||||
* @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
|
||||
* @param {Object} response - HTTP Response object used to respond to the HTTP request.
|
||||
*
|
||||
* @returns {void}
|
||||
*/
|
||||
router.post('/version', async (request, response) => {
|
||||
if (!request.body.extensionName) {
|
||||
return response.status(400).send('Bad Request: extensionName is required in the request body.');
|
||||
}
|
||||
|
||||
try {
|
||||
const { extensionName, global } = request.body;
|
||||
const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
|
||||
const extensionPath = path.join(basePath, sanitize(extensionName));
|
||||
|
||||
if (!fs.existsSync(extensionPath)) {
|
||||
return response.status(404).send(`Directory does not exist at ${extensionPath}`);
|
||||
}
|
||||
|
||||
const git = simpleGit({ baseDir: extensionPath, ...OPTIONS });
|
||||
let currentCommitHash;
|
||||
try {
|
||||
const isRepo = await git.checkIsRepo(CheckRepoActions.IS_REPO_ROOT);
|
||||
if (!isRepo) {
|
||||
throw new Error(`Directory is not a Git repository at ${extensionPath}`);
|
||||
}
|
||||
currentCommitHash = await git.revparse(['HEAD']);
|
||||
} catch (error) {
|
||||
// it is not a git repo, or has no commits yet, or is a bare repo
|
||||
// not possible to update it, most likely can't get the branch name either
|
||||
return response.send({ currentBranchName: '', currentCommitHash: '', isUpToDate: true, remoteUrl: '' });
|
||||
}
|
||||
|
||||
const currentBranch = await git.branch();
|
||||
// get only the working branch
|
||||
const currentBranchName = currentBranch.current;
|
||||
await git.fetch('origin');
|
||||
console.debug(extensionName, currentBranchName, currentCommitHash);
|
||||
const { isUpToDate, remoteUrl } = await checkIfRepoIsUpToDate(extensionPath);
|
||||
|
||||
return response.send({ currentBranchName, currentCommitHash, isUpToDate, remoteUrl });
|
||||
|
||||
} catch (error) {
|
||||
console.error('Getting extension version failed', error);
|
||||
return response.status(500).send(`Server Error: ${error.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
/**
 * HTTP POST handler function to delete a git repository based on the extension name provided in the request body.
 *
 * @param {Object} request - HTTP Request object, expects a JSON body with an 'extensionName' property.
 * @param {Object} response - HTTP Response object used to respond to the HTTP request.
 *
 * @returns {void}
 */
router.post('/delete', async (request, response) => {
    if (!request.body.extensionName) {
        return response.status(400).send('Bad Request: extensionName is required in the request body.');
    }

    try {
        const { extensionName, global } = request.body;

        if (global && !request.user.profile.admin) {
            console.error(`User ${request.user.profile.handle} does not have permission to delete global extensions.`);
            return response.status(403).send('Forbidden: No permission to delete global extensions.');
        }

        const basePath = global ? PUBLIC_DIRECTORIES.globalExtensions : request.user.directories.extensions;
        const extensionPath = path.join(basePath, sanitize(extensionName));

        if (!fs.existsSync(extensionPath)) {
            return response.status(404).send(`Directory does not exist at ${extensionPath}`);
        }

        await fs.promises.rm(extensionPath, { recursive: true });
        console.info(`Extension has been deleted at ${extensionPath}`);

        return response.send(`Extension has been deleted at ${extensionPath}`);

    } catch (error) {
        console.error('Deleting custom content failed', error);
        return response.status(500).send(`Server Error: ${error.message}`);
    }
});

/**
 * Discover the extension folders
 * If the folder is called third-party, search for subfolders instead
 */
router.get('/discover', function (request, response) {
    if (!fs.existsSync(path.join(request.user.directories.extensions))) {
        fs.mkdirSync(path.join(request.user.directories.extensions));
    }

    if (!fs.existsSync(PUBLIC_DIRECTORIES.globalExtensions)) {
        fs.mkdirSync(PUBLIC_DIRECTORIES.globalExtensions);
    }

    // Get all folders in system extensions folder, excluding third-party
    const builtInExtensions = fs
        .readdirSync(PUBLIC_DIRECTORIES.extensions)
        .filter(f => fs.statSync(path.join(PUBLIC_DIRECTORIES.extensions, f)).isDirectory())
        .filter(f => f !== 'third-party')
        .map(f => ({ type: 'system', name: f }));

    // Get all folders in local extensions folder
    const userExtensions = fs
        .readdirSync(path.join(request.user.directories.extensions))
        .filter(f => fs.statSync(path.join(request.user.directories.extensions, f)).isDirectory())
        .map(f => ({ type: 'local', name: `third-party/${f}` }));

    // Get all folders in global extensions folder
    // In case of a conflict, the extension will be loaded from the user folder
    const globalExtensions = fs
        .readdirSync(PUBLIC_DIRECTORIES.globalExtensions)
        .filter(f => fs.statSync(path.join(PUBLIC_DIRECTORIES.globalExtensions, f)).isDirectory())
        .map(f => ({ type: 'global', name: `third-party/${f}` }))
        .filter(f => !userExtensions.some(e => e.name === f.name));

    // Combine all extensions
    const allExtensions = [...builtInExtensions, ...userExtensions, ...globalExtensions];
    console.debug('Extensions available for', request.user.profile.handle, allExtensions);

    return response.send(allExtensions);
});
101
web-app/src/endpoints/files.js
Normal file
@@ -0,0 +1,101 @@
import path from 'node:path';
import fs from 'node:fs';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileSyncAtomic } from 'write-file-atomic';

import { validateAssetFileName } from './assets.js';
import { clientRelativePath } from '../util.js';

export const router = express.Router();

router.post('/sanitize-filename', async (request, response) => {
    try {
        // Check the raw value first: String(undefined) would be the truthy string 'undefined'.
        if (!request.body.fileName) {
            return response.status(400).send('No fileName specified');
        }

        const fileName = String(request.body.fileName);
        const sanitizedFilename = sanitize(fileName);
        return response.send({ fileName: sanitizedFilename });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

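// Illustrative behavior (example input, not in the diff): the sanitize-filename
// package strips path separators and other reserved characters, so a traversal
// attempt collapses into a plain file name:
//   sanitize('foo/bar.txt')  // => 'foobar.txt'
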
router.post('/upload', async (request, response) => {
    try {
        if (!request.body.name) {
            return response.status(400).send('No upload name specified');
        }

        if (!request.body.data) {
            return response.status(400).send('No upload data specified');
        }

        // Validate filename
        const validation = validateAssetFileName(request.body.name);
        if (validation.error) {
            return response.status(400).send(validation.message);
        }

        const pathToUpload = path.join(request.user.directories.files, request.body.name);
        writeFileSyncAtomic(pathToUpload, request.body.data, 'base64');
        const url = clientRelativePath(request.user.directories.root, pathToUpload);
        console.info(`Uploaded file: ${url} from ${request.user.profile.handle}`);
        return response.send({ path: url });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

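// Illustrative client payload (assumed mount point and values, not in the
// diff): the upload endpoint takes a file name plus a base64 body:
// await fetch('/api/files/upload', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json' },
//     body: JSON.stringify({ name: 'note.txt', data: Buffer.from('hello').toString('base64') }),
// });
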
router.post('/delete', async (request, response) => {
    try {
        if (!request.body.path) {
            return response.status(400).send('No path specified');
        }

        const pathToDelete = path.join(request.user.directories.root, request.body.path);
        if (!pathToDelete.startsWith(request.user.directories.files)) {
            return response.status(400).send('Invalid path');
        }

        if (!fs.existsSync(pathToDelete)) {
            return response.status(404).send('File not found');
        }

        fs.unlinkSync(pathToDelete);
        console.info(`Deleted file: ${request.body.path} from ${request.user.profile.handle}`);
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/verify', async (request, response) => {
    try {
        if (!Array.isArray(request.body.urls)) {
            return response.status(400).send('No URLs specified');
        }

        const verified = {};

        for (const url of request.body.urls) {
            const pathToVerify = path.join(request.user.directories.root, url);
            if (!pathToVerify.startsWith(request.user.directories.files)) {
                console.warn(`File verification: Invalid path: ${pathToVerify}`);
                continue;
            }
            const fileExists = fs.existsSync(pathToVerify);
            verified[url] = fileExists;
        }

        return response.send(verified);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

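// Illustrative request/response shape (paths assumed, not in the diff):
// /verify maps each URL to whether the file exists under the user's files directory:
//   request:  { urls: ['user/files/a.png', 'user/files/missing.png'] }
//   response: { 'user/files/a.png': true, 'user/files/missing.png': false }
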
641
web-app/src/endpoints/google.js
Normal file
@@ -0,0 +1,641 @@
import { Buffer } from 'node:buffer';
import fetch from 'node-fetch';
import express from 'express';
import { speak, languages } from 'google-translate-api-x';
import crypto from 'node:crypto';
import util from 'node:util';
import urlJoin from 'url-join';
import lodash from 'lodash';

import { readSecret, SECRET_KEYS } from './secrets.js';
import { GEMINI_SAFETY, VERTEX_SAFETY } from '../constants.js';
import { delay, getConfigValue, trimTrailingSlash } from '../util.js';

const API_MAKERSUITE = 'https://generativelanguage.googleapis.com';
const API_VERTEX_AI = 'https://us-central1-aiplatform.googleapis.com';

/**
 * Builds a canonical 44-byte PCM WAV header for a payload of the given size.
 * @param {number} dataSize Size of the PCM payload in bytes
 * @param {number} sampleRate Sample rate in Hz
 * @param {number} numChannels Number of audio channels
 * @param {number} bitsPerSample Bits per sample
 * @returns {Buffer} WAV header
 */
function createWavHeader(dataSize, sampleRate, numChannels = 1, bitsPerSample = 16) {
    const header = Buffer.alloc(44);
    header.write('RIFF', 0);                                                // ChunkID
    header.writeUInt32LE(36 + dataSize, 4);                                 // ChunkSize
    header.write('WAVE', 8);                                                // Format
    header.write('fmt ', 12);                                               // Subchunk1ID
    header.writeUInt32LE(16, 16);                                           // Subchunk1Size (16 for PCM)
    header.writeUInt16LE(1, 20);                                            // AudioFormat (1 = linear PCM)
    header.writeUInt16LE(numChannels, 22);                                  // NumChannels
    header.writeUInt32LE(sampleRate, 24);                                   // SampleRate
    header.writeUInt32LE(sampleRate * numChannels * bitsPerSample / 8, 28); // ByteRate
    header.writeUInt16LE(numChannels * bitsPerSample / 8, 32);              // BlockAlign
    header.writeUInt16LE(bitsPerSample, 34);                                // BitsPerSample
    header.write('data', 36);                                               // Subchunk2ID
    header.writeUInt32LE(dataSize, 40);                                     // Subchunk2Size
    return header;
}

/**
 * Prepends a WAV header to raw PCM data to produce a playable WAV file.
 * @param {Buffer} pcmData Raw PCM payload
 * @param {number} sampleRate Sample rate in Hz
 * @returns {Buffer} Complete WAV file
 */
function createCompleteWavFile(pcmData, sampleRate) {
    const header = createWavHeader(pcmData.length, sampleRate);
    return Buffer.concat([header, pcmData]);
}

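// Worked example (illustrative arithmetic, not in the diff): for 16-bit mono
// PCM at the 24000 Hz default used further down,
//   ByteRate   = 24000 * 1 * 16 / 8 = 48000 bytes/s
//   BlockAlign = 1 * 16 / 8 = 2 bytes per sample frame
// so one second of audio is 48000 payload bytes plus the 44-byte header.
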
// Vertex AI authentication helper functions

/**
 * Resolves the authorization header for a Vertex AI request.
 * @param {express.Request} request Express request object
 * @returns {Promise<{authHeader: string, authType: string}>} Authorization header value and the auth mode used
 */
export async function getVertexAIAuth(request) {
    const authMode = request.body.vertexai_auth_mode || 'express';

    if (request.body.reverse_proxy) {
        return {
            authHeader: `Bearer ${request.body.proxy_password}`,
            authType: 'proxy',
        };
    }

    if (authMode === 'express') {
        const apiKey = readSecret(request.user.directories, SECRET_KEYS.VERTEXAI);
        if (apiKey) {
            return {
                authHeader: `Bearer ${apiKey}`,
                authType: 'express',
            };
        }
        throw new Error('API key is required for Vertex AI Express mode');
    } else if (authMode === 'full') {
        // Get service account JSON from backend storage
        const serviceAccountJson = readSecret(request.user.directories, SECRET_KEYS.VERTEXAI_SERVICE_ACCOUNT);

        if (serviceAccountJson) {
            try {
                const serviceAccount = JSON.parse(serviceAccountJson);
                const jwtToken = await generateJWTToken(serviceAccount);
                const accessToken = await getAccessToken(jwtToken);
                return {
                    authHeader: `Bearer ${accessToken}`,
                    authType: 'full',
                };
            } catch (error) {
                console.error('Failed to authenticate with service account:', error);
                throw new Error(`Service account authentication failed: ${error.message}`);
            }
        }
        throw new Error('Service Account JSON is required for Vertex AI Full mode');
    }

    throw new Error(`Unsupported Vertex AI authentication mode: ${authMode}`);
}

/**
 * Generates a JWT token for Google Cloud authentication using service account credentials.
 * @param {object} serviceAccount Service account JSON object
 * @returns {Promise<string>} JWT token
 */
export async function generateJWTToken(serviceAccount) {
    const now = Math.floor(Date.now() / 1000);
    const expiry = now + 3600; // 1 hour

    const header = {
        alg: 'RS256',
        typ: 'JWT',
    };

    const payload = {
        iss: serviceAccount.client_email,
        scope: 'https://www.googleapis.com/auth/cloud-platform',
        aud: 'https://oauth2.googleapis.com/token',
        iat: now,
        exp: expiry,
    };

    const headerBase64 = Buffer.from(JSON.stringify(header)).toString('base64url');
    const payloadBase64 = Buffer.from(JSON.stringify(payload)).toString('base64url');
    const signatureInput = `${headerBase64}.${payloadBase64}`;

    // Create signature using private key
    const sign = crypto.createSign('RSA-SHA256');
    sign.update(signatureInput);
    const signature = sign.sign(serviceAccount.private_key, 'base64url');

    return `${signatureInput}.${signature}`;
}

/**
 * Exchanges a signed JWT assertion for a Google OAuth2 access token.
 * @param {string} jwtToken Signed JWT assertion
 * @returns {Promise<string>} OAuth2 access token
 */
export async function getAccessToken(jwtToken) {
    const response = await fetch('https://oauth2.googleapis.com/token', {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: new URLSearchParams({
            grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',
            assertion: jwtToken,
        }),
    });

    if (!response.ok) {
        const error = await response.text();
        throw new Error(`Failed to get access token: ${error}`);
    }

    /** @type {any} */
    const data = await response.json();
    return data.access_token;
}

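// Illustrative flow (inputs assumed, not in the diff): full-mode auth signs a
// one-hour JWT with the service account's private key and trades it for a
// bearer token:
// const serviceAccount = JSON.parse(serviceAccountJson); // { client_email, private_key, project_id, ... }
// const jwt = await generateJWTToken(serviceAccount);
// const accessToken = await getAccessToken(jwt);
// const headers = { 'Authorization': `Bearer ${accessToken}` };
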
/**
 * Extracts the project ID from a Service Account JSON object.
 * @param {object} serviceAccount Service account JSON object
 * @returns {string} Project ID
 * @throws {Error} If project ID is not found in the service account
 */
export function getProjectIdFromServiceAccount(serviceAccount) {
    if (!serviceAccount || typeof serviceAccount !== 'object') {
        throw new Error('Invalid service account object');
    }

    const projectId = serviceAccount.project_id;
    if (!projectId || typeof projectId !== 'string') {
        throw new Error('Project ID not found in service account JSON');
    }

    return projectId;
}

/**
 * Generates the Google API URL and headers based on the request configuration.
 * @param {express.Request} request Express request object
 * @param {string} model Model name to use
 * @param {string} endpoint API endpoint (default: 'generateContent')
 * @returns {Promise<{url: string, headers: object, apiName: string, baseUrl: string, safetySettings: object[]}>} URL, headers, API name, base URL, and safety settings
 */
export async function getGoogleApiConfig(request, model, endpoint = 'generateContent') {
    const useVertexAi = request.body.api === 'vertexai';
    const region = request.body.vertexai_region || 'us-central1';
    const apiName = useVertexAi ? 'Google Vertex AI' : 'Google AI Studio';
    const safetySettings = [...GEMINI_SAFETY, ...(useVertexAi ? VERTEX_SAFETY : [])];

    let url;
    let baseUrl;
    let headers = {
        'Content-Type': 'application/json',
    };

    if (useVertexAi) {
        // Get authentication for Vertex AI
        const { authHeader, authType } = await getVertexAIAuth(request);

        if (authType === 'express') {
            // Express mode: use API key parameter
            const keyParam = authHeader.replace('Bearer ', '');
            const projectId = request.body.vertexai_express_project_id;
            baseUrl = region === 'global'
                ? 'https://aiplatform.googleapis.com/v1'
                : `https://${region}-aiplatform.googleapis.com/v1`;
            url = projectId
                ? `${baseUrl}/projects/${projectId}/locations/${region}/publishers/google/models/${model}:${endpoint}`
                : `${baseUrl}/publishers/google/models/${model}:${endpoint}`;
            headers['x-goog-api-key'] = keyParam;
        } else if (authType === 'full') {
            // Full mode: use project-specific URL with Authorization header
            // Get project ID from Service Account JSON
            const serviceAccountJson = readSecret(request.user.directories, SECRET_KEYS.VERTEXAI_SERVICE_ACCOUNT);
            if (!serviceAccountJson) {
                throw new Error('Vertex AI Service Account JSON is missing.');
            }

            let projectId;
            try {
                const serviceAccount = JSON.parse(serviceAccountJson);
                projectId = getProjectIdFromServiceAccount(serviceAccount);
            } catch (error) {
                throw new Error('Failed to extract project ID from Service Account JSON.');
            }
            // Handle global region differently - no region prefix in hostname
            baseUrl = region === 'global'
                ? 'https://aiplatform.googleapis.com/v1'
                : `https://${region}-aiplatform.googleapis.com/v1`;
            url = `${baseUrl}/projects/${projectId}/locations/${region}/publishers/google/models/${model}:${endpoint}`;
            headers['Authorization'] = authHeader;
        } else {
            // Proxy mode: use Authorization header
            const apiUrl = trimTrailingSlash(request.body.reverse_proxy || API_VERTEX_AI);
            baseUrl = `${apiUrl}/v1`;
            url = `${baseUrl}/publishers/google/models/${model}:${endpoint}`;
            headers['Authorization'] = authHeader;
        }
    } else {
        // Google AI Studio
        const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.MAKERSUITE);
        const apiUrl = trimTrailingSlash(request.body.reverse_proxy || API_MAKERSUITE);
        const apiVersion = getConfigValue('gemini.apiVersion', 'v1beta');
        baseUrl = `${apiUrl}/${apiVersion}`;
        url = `${baseUrl}/models/${model}:${endpoint}`;
        headers['x-goog-api-key'] = apiKey;
    }

    return { url, headers, apiName, baseUrl, safetySettings };
}

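// Illustrative URL shapes produced above (the project ID is a placeholder):
//   AI Studio: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent
//   Vertex AI: https://us-central1-aiplatform.googleapis.com/v1/projects/my-project/locations/us-central1/publishers/google/models/gemini-2.0-flash:generateContent
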
export const router = express.Router();

router.post('/caption-image', async (request, response) => {
    try {
        // The image arrives as a data URI ('data:image/png;base64,...'): split out the MIME type and the payload.
        const mimeType = request.body.image.split(';')[0].split(':')[1];
        const base64Data = request.body.image.split(',')[1];
        const model = request.body.model || 'gemini-2.0-flash';
        const { url, headers, apiName, safetySettings } = await getGoogleApiConfig(request, model);

        const body = {
            contents: [{
                role: 'user',
                parts: [
                    { text: request.body.prompt },
                    {
                        inlineData: {
                            mimeType: mimeType,
                            data: base64Data,
                        },
                    }],
            }],
            safetySettings: safetySettings,
        };

        console.debug(`${apiName} captioning request`, model, body);

        const result = await fetch(url, {
            body: JSON.stringify(body),
            method: 'POST',
            headers: headers,
        });

        if (!result.ok) {
            const error = await result.json();
            console.error(`${apiName} API returned error: ${result.status} ${result.statusText}`, error);
            return response.status(500).send({ error: true });
        }

        /** @type {any} */
        const data = await result.json();
        console.info(`${apiName} captioning response`, data);

        const candidates = data?.candidates;
        if (!candidates) {
            return response.status(500).send('No candidates found, image was most likely filtered.');
        }

        const caption = candidates[0].content.parts[0].text;
        if (!caption) {
            return response.status(500).send('No caption found');
        }

        return response.json({ caption });
    } catch (error) {
        console.error(error);
        response.status(500).send('Internal server error');
    }
});

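// Illustrative request body (values assumed, not in the diff): the captioning
// endpoint expects a data-URI image plus a prompt:
//   { image: 'data:image/png;base64,iVBORw0...', prompt: 'Describe the image.', model: 'gemini-2.0-flash' }
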
// Google Translate TTS exposes its supported language codes as the "voice" list.
router.post('/list-voices', (_, response) => {
    return response.json(languages);
});

router.post('/generate-voice', async (request, response) => {
    try {
        const text = request.body.text;
        const voice = request.body.voice ?? 'en';

        const result = await speak(text, { to: voice, forceBatch: false });
        const buffer = Array.isArray(result)
            ? Buffer.concat(result.map(x => new Uint8Array(Buffer.from(x.toString(), 'base64'))))
            : Buffer.from(result.toString(), 'base64');

        response.setHeader('Content-Type', 'audio/mpeg');
        return response.send(buffer);
    } catch (error) {
        console.error('Google Translate TTS generation failed', error);
        response.status(500).send('Internal server error');
    }
});

router.post('/list-native-voices', async (_, response) => {
    try {
        // Hardcoded Gemini native TTS voices from official documentation
        // Source: https://ai.google.dev/gemini-api/docs/speech-generation#voices
        const voices = [
            { name: 'Zephyr', voice_id: 'Zephyr', lang: 'en-US', description: 'Bright' },
            { name: 'Puck', voice_id: 'Puck', lang: 'en-US', description: 'Upbeat' },
            { name: 'Charon', voice_id: 'Charon', lang: 'en-US', description: 'Informative' },
            { name: 'Kore', voice_id: 'Kore', lang: 'en-US', description: 'Firm' },
            { name: 'Fenrir', voice_id: 'Fenrir', lang: 'en-US', description: 'Excitable' },
            { name: 'Leda', voice_id: 'Leda', lang: 'en-US', description: 'Youthful' },
            { name: 'Orus', voice_id: 'Orus', lang: 'en-US', description: 'Firm' },
            { name: 'Aoede', voice_id: 'Aoede', lang: 'en-US', description: 'Breezy' },
            { name: 'Callirhoe', voice_id: 'Callirhoe', lang: 'en-US', description: 'Easy-going' },
            { name: 'Autonoe', voice_id: 'Autonoe', lang: 'en-US', description: 'Bright' },
            { name: 'Enceladus', voice_id: 'Enceladus', lang: 'en-US', description: 'Breathy' },
            { name: 'Iapetus', voice_id: 'Iapetus', lang: 'en-US', description: 'Clear' },
            { name: 'Umbriel', voice_id: 'Umbriel', lang: 'en-US', description: 'Easy-going' },
            { name: 'Algieba', voice_id: 'Algieba', lang: 'en-US', description: 'Smooth' },
            { name: 'Despina', voice_id: 'Despina', lang: 'en-US', description: 'Smooth' },
            { name: 'Erinome', voice_id: 'Erinome', lang: 'en-US', description: 'Clear' },
            { name: 'Algenib', voice_id: 'Algenib', lang: 'en-US', description: 'Gravelly' },
            { name: 'Rasalgethi', voice_id: 'Rasalgethi', lang: 'en-US', description: 'Informative' },
            { name: 'Laomedeia', voice_id: 'Laomedeia', lang: 'en-US', description: 'Upbeat' },
            { name: 'Achernar', voice_id: 'Achernar', lang: 'en-US', description: 'Soft' },
            { name: 'Alnilam', voice_id: 'Alnilam', lang: 'en-US', description: 'Firm' },
            { name: 'Schedar', voice_id: 'Schedar', lang: 'en-US', description: 'Even' },
            { name: 'Gacrux', voice_id: 'Gacrux', lang: 'en-US', description: 'Mature' },
            { name: 'Pulcherrima', voice_id: 'Pulcherrima', lang: 'en-US', description: 'Forward' },
            { name: 'Achird', voice_id: 'Achird', lang: 'en-US', description: 'Friendly' },
            { name: 'Zubenelgenubi', voice_id: 'Zubenelgenubi', lang: 'en-US', description: 'Casual' },
            { name: 'Vindemiatrix', voice_id: 'Vindemiatrix', lang: 'en-US', description: 'Gentle' },
            { name: 'Sadachbia', voice_id: 'Sadachbia', lang: 'en-US', description: 'Lively' },
            { name: 'Sadaltager', voice_id: 'Sadaltager', lang: 'en-US', description: 'Knowledgeable' },
            { name: 'Sulafat', voice_id: 'Sulafat', lang: 'en-US', description: 'Warm' },
        ];
        return response.json({ voices });
    } catch (error) {
        console.error('Failed to return Google TTS voices:', error);
        response.sendStatus(500);
    }
});

router.post('/generate-native-tts', async (request, response) => {
    try {
        const { text, voice, model } = request.body;
        const { url, headers, apiName, safetySettings } = await getGoogleApiConfig(request, model);

        console.debug(`${apiName} TTS request`, { model, text, voice });

        const requestBody = {
            contents: [{
                role: 'user',
                parts: [{ text: text }],
            }],
            generationConfig: {
                responseModalities: ['AUDIO'],
                speechConfig: {
                    voiceConfig: {
                        prebuiltVoiceConfig: {
                            voiceName: voice,
                        },
                    },
                },
            },
            safetySettings: safetySettings,
        };

        const result = await fetch(url, {
            method: 'POST',
            headers: headers,
            body: JSON.stringify(requestBody),
        });

        if (!result.ok) {
            const errorText = await result.text();
            console.error(`${apiName} TTS API error: ${result.status} ${result.statusText}`, errorText);
            let errorMessage = 'TTS generation failed.';
            try {
                errorMessage = JSON.parse(errorText).error?.message || errorMessage;
            } catch {
                // The error body was not JSON; keep the generic message.
            }
            return response.status(result.status).json({ error: errorMessage });
        }

        /** @type {any} */
        const data = await result.json();
        const audioPart = data?.candidates?.[0]?.content?.parts?.[0];
        const audioData = audioPart?.inlineData?.data;
        const mimeType = audioPart?.inlineData?.mimeType;

        if (!audioData) {
            return response.status(500).json({ error: 'No audio data found in response' });
        }

        const audioBuffer = Buffer.from(audioData, 'base64');

        // If the audio is raw PCM, wrap it in a WAV header and send it.
        if (mimeType && mimeType.toLowerCase().includes('audio/l16')) {
            const rateMatch = mimeType.match(/rate=(\d+)/);
            const sampleRate = rateMatch ? parseInt(rateMatch[1], 10) : 24000;
            const pcmData = audioBuffer;

            // Create a complete, playable WAV file buffer.
            const wavBuffer = createCompleteWavFile(pcmData, sampleRate);

            // Send the WAV file directly to the browser. This is much faster.
            response.setHeader('Content-Type', 'audio/wav');
            return response.send(wavBuffer);
        }

        // Fallback for any other audio format Google might send in the future.
        response.setHeader('Content-Type', mimeType || 'application/octet-stream');
        response.send(audioBuffer);
    } catch (error) {
        console.error('Google TTS generation failed:', error);
        if (!response.headersSent) {
            return response.status(500).json({ error: 'Internal server error during TTS generation' });
        }
        return response.end();
    }
});

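// Illustrative MIME value (assumed example): Gemini native TTS reports raw PCM
// as something like 'audio/L16;codec=pcm;rate=24000', so the branch above
// parses sampleRate = 24000 and wraps the payload via createCompleteWavFile().
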
router.post('/generate-image', async (request, response) => {
    try {
        const model = request.body.model || 'imagen-3.0-generate-002';
        const { url, headers, apiName } = await getGoogleApiConfig(request, model, 'predict');

        // AI Studio is stricter than Vertex AI.
        const isVertex = request.body.api === 'vertexai';
        // Legacy 'imagegeneration' models don't accept the newer person/safety parameters. Is it even worth supporting them?
        const isDeprecated = model.startsWith('imagegeneration');
        // Get person generation setting from config
        const personGeneration = getConfigValue('gemini.image.personGeneration', 'allow_adult');

        const requestBody = {
            instances: [{
                prompt: request.body.prompt || '',
            }],
            parameters: {
                sampleCount: 1,
                seed: isVertex ? Number(request.body.seed ?? Math.floor(Math.random() * 1000000)) : undefined,
                enhancePrompt: isVertex ? Boolean(request.body.enhance ?? false) : undefined,
                negativePrompt: isVertex ? (request.body.negative_prompt || undefined) : undefined,
                aspectRatio: String(request.body.aspect_ratio || '1:1'),
                personGeneration: !isDeprecated && personGeneration ? personGeneration : undefined,
                language: isVertex ? 'auto' : undefined,
                safetySetting: !isDeprecated ? (isVertex ? 'block_only_high' : 'block_low_and_above') : undefined,
                addWatermark: isVertex ? false : undefined,
                outputOptions: {
                    mimeType: 'image/jpeg',
                    compressionQuality: 100,
                },
            },
        };

        console.debug(`${apiName} image generation request:`, model, requestBody);

        const result = await fetch(url, {
            method: 'POST',
            headers: headers,
            body: JSON.stringify(requestBody),
        });

        if (!result.ok) {
            const errorText = await result.text();
            console.warn(`${apiName} image generation error: ${result.status} ${result.statusText}`, errorText);
            return response.status(500).send('Image generation request failed');
        }

        /** @type {any} */
        const data = await result.json();
        const imagePart = data?.predictions?.[0]?.bytesBase64Encoded;

        if (!imagePart) {
            console.warn(`${apiName} image generation error: No image data found in response`);
            return response.status(500).send('No image data found in response');
        }

        return response.send({ image: imagePart });
    } catch (error) {
        console.error('Google Image generation failed:', error);
        if (!response.headersSent) {
            return response.sendStatus(500);
        }
        return response.end();
    }
});

router.post('/generate-video', async (request, response) => {
    try {
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const model = request.body.model || 'veo-3.1-generate-preview';
        const { url, headers, apiName, baseUrl } = await getGoogleApiConfig(request, model, 'predictLongRunning');
        const useVertexAi = request.body.api === 'vertexai';

        const isVeo3 = /veo-3/.test(model);
        const lowerBound = isVeo3 ? 4 : 5;
        const upperBound = 8; // Both model families currently cap out at 8 seconds

        const requestBody = {
            instances: [{
                prompt: String(request.body.prompt || ''),
            }],
            parameters: {
                negativePrompt: String(request.body.negative_prompt || ''),
                durationSeconds: lodash.clamp(Number(request.body.seconds || 6), lowerBound, upperBound),
                aspectRatio: String(request.body.aspect_ratio || '16:9'),
                personGeneration: 'allow_all',
                seed: isVeo3 ? Number(request.body.seed ?? Math.floor(Math.random() * 1000000)) : undefined,
            },
        };

        console.debug(`${apiName} video generation request:`, model, requestBody);
        const videoJobResponse = await fetch(url, {
            method: 'POST',
            headers: headers,
            body: JSON.stringify(requestBody),
        });

        if (!videoJobResponse.ok) {
            const errorText = await videoJobResponse.text();
            console.warn(`${apiName} video generation error: ${videoJobResponse.status} ${videoJobResponse.statusText}`, errorText);
            return response.status(500).send('Video generation request failed');
        }

        /** @type {any} */
        const videoJobData = await videoJobResponse.json();
        const videoJobName = videoJobData?.name;

        if (!videoJobName) {
            console.warn(`${apiName} video generation error: No job name found in response`);
            return response.status(500).send('No video job name found in response');
        }

        console.debug(`${apiName} video job name:`, videoJobName);

        for (let attempt = 0; attempt < 30; attempt++) {
            if (controller.signal.aborted) {
                console.info(`${apiName} video generation aborted by client`);
                return response.status(500).send('Video generation aborted by client');
            }

            await delay(5000 + attempt * 1000);

            if (useVertexAi) {
                const { url: pollUrl, headers: pollHeaders } = await getGoogleApiConfig(request, model, 'fetchPredictOperation');

                const pollResponse = await fetch(pollUrl, {
                    method: 'POST',
                    headers: pollHeaders,
                    body: JSON.stringify({ operationName: videoJobName }),
                });

                if (!pollResponse.ok) {
                    const errorText = await pollResponse.text();
                    console.warn(`${apiName} video job status error: ${pollResponse.status} ${pollResponse.statusText}`, errorText);
                    return response.status(500).send('Video job status request failed');
                }

                /** @type {any} */
                const pollData = await pollResponse.json();
                const jobDone = pollData?.done;
                console.debug(`${apiName} video job status attempt ${attempt + 1}: ${jobDone ? 'done' : 'running'}`);

                if (jobDone) {
                    const videoData = pollData?.response?.videos?.[0]?.bytesBase64Encoded;
                    if (!videoData) {
                        const pollDataLog = util.inspect(pollData, { depth: 5, colors: true, maxStringLength: 500 });
                        console.warn(`${apiName} video generation error: No video data found in response`, pollDataLog);
                        return response.status(500).send('No video data found in response');
                    }

                    return response.send({ video: videoData });
                }
            } else {
                const pollUrl = urlJoin(baseUrl, videoJobName);
                const pollResponse = await fetch(pollUrl, {
                    method: 'GET',
                    headers: headers,
                });

                if (!pollResponse.ok) {
                    const errorText = await pollResponse.text();
                    console.warn(`${apiName} video job status error: ${pollResponse.status} ${pollResponse.statusText}`, errorText);
                    return response.status(500).send('Video job status request failed');
                }

                /** @type {any} */
                const pollData = await pollResponse.json();
                const jobDone = pollData?.done;
                console.debug(`${apiName} video job status attempt ${attempt + 1}: ${jobDone ? 'done' : 'running'}`);

                if (jobDone) {
                    const videoUri = pollData?.response?.generateVideoResponse?.generatedSamples?.[0]?.video?.uri;
                    console.debug(`${apiName} video URI:`, videoUri);

                    if (!videoUri) {
                        const pollDataLog = util.inspect(pollData, { depth: 5, colors: true, maxStringLength: 500 });
                        console.warn(`${apiName} video generation error: No video URI found in response`, pollDataLog);
                        return response.status(500).send('No video URI found in response');
                    }

                    const videoResponse = await fetch(videoUri, {
                        method: 'GET',
                        headers: headers,
                    });

                    if (!videoResponse.ok) {
                        console.warn(`${apiName} video fetch error: ${videoResponse.status} ${videoResponse.statusText}`);
                        return response.status(500).send('Video fetch request failed');
                    }

                    const videoData = await videoResponse.arrayBuffer();
                    const videoBase64 = Buffer.from(videoData).toString('base64');

                    return response.send({ video: videoBase64 });
                }
            }
        }

        console.warn(`${apiName} video generation error: Job timed out after multiple attempts`);
        return response.status(500).send('Video generation timed out');
    } catch (error) {
        console.error('Google Video generation failed:', error);
        return response.sendStatus(500);
    }
});

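// Illustrative arithmetic (derived from the loop above): with 30 attempts and
// a delay of 5000 + attempt * 1000 ms, the poll waits 5 s, 6 s, ... 34 s
// between checks, i.e. roughly 585 seconds in total before timing out.
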
235
web-app/src/endpoints/groups.js
Normal file
@@ -0,0 +1,235 @@
import fs from 'node:fs';
import { promises as fsPromises } from 'node:fs';
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync, default as writeFileAtomic } from 'write-file-atomic';

import { color, tryParse } from '../util.js';
import { getFileNameValidationFunction } from '../middleware/validateFileName.js';

export const router = express.Router();

/**
 * Warns if group data contains deprecated metadata keys and removes them.
 * @param {object} groupData Group data object
 */
function warnOnGroupMetadata(groupData) {
    if (typeof groupData !== 'object' || groupData === null) {
        return;
    }
    ['chat_metadata', 'past_metadata'].forEach(key => {
        if (Object.hasOwn(groupData, key)) {
            console.warn(color.yellow(`Group JSON data for "${groupData.id}" contains deprecated key "${key}".`));
            delete groupData[key];
        }
    });
}

/**
 * Migrates group metadata to include chat metadata for each group chat instead of the group itself.
 * @param {import('../users.js').UserDirectoryList[]} userDirectories Listing of all users' directories
 */
export async function migrateGroupChatsMetadataFormat(userDirectories) {
    for (const userDirs of userDirectories) {
        try {
            let anyDataMigrated = false;
            const backupPath = path.join(userDirs.backups, '_group_metadata_update');
            const groupFiles = await fsPromises.readdir(userDirs.groups, { withFileTypes: true });
            const groupChatFiles = await fsPromises.readdir(userDirs.groupChats, { withFileTypes: true });
            for (const groupFile of groupFiles) {
                try {
                    const isJsonFile = groupFile.isFile() && path.extname(groupFile.name) === '.json';
                    if (!isJsonFile) {
                        continue;
                    }
                    const groupFilePath = path.join(userDirs.groups, groupFile.name);
                    const groupDataRaw = await fsPromises.readFile(groupFilePath, 'utf8');
                    const groupData = tryParse(groupDataRaw) || {};
                    const needsMigration = ['chat_metadata', 'past_metadata'].some(key => Object.hasOwn(groupData, key));
                    if (!needsMigration) {
                        continue;
                    }
                    if (!fs.existsSync(backupPath)) {
                        await fsPromises.mkdir(backupPath, { recursive: true });
                    }
                    await fsPromises.copyFile(groupFilePath, path.join(backupPath, groupFile.name));
                    const allMetadata = {
                        ...(groupData.past_metadata || {}),
                        [groupData.chat_id]: (groupData.chat_metadata || {}),
                    };
                    if (!Array.isArray(groupData.chats)) {
                        console.warn(color.yellow(`Group ${groupFile.name} has no chats array, skipping migration.`));
                        continue;
                    }
                    for (const chatId of groupData.chats) {
                        try {
                            const chatFileName = sanitize(`${chatId}.jsonl`);
                            const chatFileDirent = groupChatFiles.find(f => f.isFile() && f.name === chatFileName);
                            if (!chatFileDirent) {
                                console.warn(color.yellow(`Group chat file ${chatId} not found, skipping migration.`));
                                continue;
                            }
                            const chatFilePath = path.join(userDirs.groupChats, chatFileName);
                            const chatMetadata = allMetadata[chatId] || {};
                            const chatDataRaw = await fsPromises.readFile(chatFilePath, 'utf8');
                            const chatData = chatDataRaw.split('\n').filter(line => line.trim()).map(line => tryParse(line)).filter(Boolean);
                            const alreadyHasMetadata = chatData.length > 0 && Object.hasOwn(chatData[0], 'chat_metadata');
                            if (alreadyHasMetadata) {
                                console.log(color.yellow(`Group chat ${chatId} already has chat metadata, skipping update.`));
                                continue;
                            }
                            await fsPromises.copyFile(chatFilePath, path.join(backupPath, chatFileName));
                            const chatHeader = { chat_metadata: chatMetadata, user_name: 'unused', character_name: 'unused' };
                            const newChatData = [chatHeader, ...chatData];
                            const newChatDataRaw = newChatData.map(entry => JSON.stringify(entry)).join('\n');
                            await writeFileAtomic(chatFilePath, newChatDataRaw, 'utf8');
                            console.log(`Updated group chat data format for ${chatId}`);
                            anyDataMigrated = true;
                        } catch (chatError) {
                            console.error(color.red(`Could not update existing chat data for ${chatId}`), chatError);
                        }
                    }
                    delete groupData.chat_metadata;
                    delete groupData.past_metadata;
                    await writeFileAtomic(groupFilePath, JSON.stringify(groupData, null, 4), 'utf8');
                    console.log(`Migrated group chats metadata for group: ${groupData.id}`);
                    anyDataMigrated = true;
                } catch (groupError) {
                    console.error(color.red(`Could not process group file ${groupFile.name}`), groupError);
                }
            }
            if (anyDataMigrated) {
                console.log(color.green(`Completed migration of group chats metadata for user at ${userDirs.root}`));
                console.log(color.cyan(`Backups of modified files are located at ${backupPath}`));
            }
        } catch (directoryError) {
            console.error(color.red(`Error migrating group chats metadata for user at ${userDirs.root}`), directoryError);
        }
    }
}

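// Illustrative result (field values assumed, fields abbreviated): after
// migration, the first line of each group chat .jsonl is the metadata header,
// followed by the message lines:
//   {"chat_metadata":{...},"user_name":"unused","character_name":"unused"}
//   {"name":"Alice","mes":"Hello!"}
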
router.post('/all', (request, response) => {
    const groups = [];

    if (!fs.existsSync(request.user.directories.groups)) {
        fs.mkdirSync(request.user.directories.groups);
    }

    const files = fs.readdirSync(request.user.directories.groups).filter(x => path.extname(x) === '.json');
    const chats = fs.readdirSync(request.user.directories.groupChats).filter(x => path.extname(x) === '.jsonl');

    files.forEach(function (file) {
        try {
            const filePath = path.join(request.user.directories.groups, file);
            const fileContents = fs.readFileSync(filePath, 'utf8');
            const group = JSON.parse(fileContents);
            const groupStat = fs.statSync(filePath);
            group['date_added'] = groupStat.birthtimeMs;
            group['create_date'] = new Date(groupStat.birthtimeMs).toISOString();

            let chat_size = 0;
            let date_last_chat = 0;

            if (Array.isArray(group.chats) && Array.isArray(chats)) {
                for (const chat of chats) {
                    if (group.chats.includes(path.parse(chat).name)) {
                        const chatStat = fs.statSync(path.join(request.user.directories.groupChats, chat));
                        chat_size += chatStat.size;
                        date_last_chat = Math.max(date_last_chat, chatStat.mtimeMs);
                    }
                }
            }

            group['date_last_chat'] = date_last_chat;
            group['chat_size'] = chat_size;
            groups.push(group);
        }
        catch (error) {
            console.error(error);
        }
    });

    return response.send(groups);
});

router.post('/create', (request, response) => {
    if (!request.body) {
        return response.sendStatus(400);
    }

    warnOnGroupMetadata(request.body);
    const id = String(Date.now());
    const groupMetadata = {
        id: id,
        name: request.body.name ?? 'New Group',
        members: request.body.members ?? [],
        avatar_url: request.body.avatar_url,
        allow_self_responses: !!request.body.allow_self_responses,
        activation_strategy: request.body.activation_strategy ?? 0,
        generation_mode: request.body.generation_mode ?? 0,
        disabled_members: request.body.disabled_members ?? [],
        fav: request.body.fav,
        chat_id: request.body.chat_id ?? id,
        chats: request.body.chats ?? [id],
        auto_mode_delay: request.body.auto_mode_delay ?? 5,
        generation_mode_join_prefix: request.body.generation_mode_join_prefix ?? '',
        generation_mode_join_suffix: request.body.generation_mode_join_suffix ?? '',
    };
    const pathToFile = path.join(request.user.directories.groups, sanitize(`${id}.json`));
    const fileData = JSON.stringify(groupMetadata, null, 4);

    if (!fs.existsSync(request.user.directories.groups)) {
        fs.mkdirSync(request.user.directories.groups);
    }

    writeFileAtomicSync(pathToFile, fileData);
    return response.send(groupMetadata);
});

router.post('/edit', getFileNameValidationFunction('id'), (request, response) => {
    if (!request.body || !request.body.id) {
        return response.sendStatus(400);
    }
    warnOnGroupMetadata(request.body);
    const id = request.body.id;
    const pathToFile = path.join(request.user.directories.groups, sanitize(`${id}.json`));
    const fileData = JSON.stringify(request.body, null, 4);

    writeFileAtomicSync(pathToFile, fileData);
    return response.send({ ok: true });
});

router.post('/delete', getFileNameValidationFunction('id'), async (request, response) => {
    if (!request.body || !request.body.id) {
        return response.sendStatus(400);
    }

    const id = request.body.id;
    const pathToGroup = path.join(request.user.directories.groups, sanitize(`${id}.json`));

    try {
        // Delete group chats
        const group = JSON.parse(fs.readFileSync(pathToGroup, 'utf8'));

        if (group && Array.isArray(group.chats)) {
            for (const chat of group.chats) {
                console.info('Deleting group chat', chat);
                const pathToFile = path.join(request.user.directories.groupChats, sanitize(`${chat}.jsonl`));

                if (fs.existsSync(pathToFile)) {
                    fs.unlinkSync(pathToFile);
                }
            }
        }
    } catch (error) {
        console.error('Could not delete group chats. Clean them up manually.', error);
    }

    if (fs.existsSync(pathToGroup)) {
        fs.unlinkSync(pathToGroup);
    }

    return response.send({ ok: true });
});
411
web-app/src/endpoints/horde.js
Normal file
@@ -0,0 +1,411 @@
import fetch from 'node-fetch';
import express from 'express';
import { AIHorde, ModelGenerationInputStableSamplers, ModelInterrogationFormTypes, HordeAsyncRequestStates } from '@zeldafan0225/ai_horde';
import { getVersion, delay, Cache } from '../util.js';
import { readSecret, SECRET_KEYS } from './secrets.js';

const ANONYMOUS_KEY = '0000000000';
const HORDE_TEXT_MODEL_METADATA_URL = 'https://raw.githubusercontent.com/db0/AI-Horde-text-model-reference/main/db.json';
const cache = new Cache(60 * 1000);
export const router = express.Router();

/**
 * Returns the AIHorde client agent.
 * @returns {Promise<string>} AIHorde client agent
 */
async function getClientAgent() {
    const version = await getVersion();
    return version?.agent || 'SillyTavern:UNKNOWN:Cohee#1207';
}

/**
 * Returns the AIHorde client.
 * @returns {Promise<AIHorde>} AIHorde client
 */
async function getHordeClient() {
    return new AIHorde({
        client_agent: await getClientAgent(),
    });
}

/**
 * Removes dirty no-no words from the prompt.
 * Taken verbatim from KAI Lite's implementation (AGPLv3).
 * https://github.com/LostRuins/lite.koboldai.net/blob/main/index.html#L7786C2-L7811C1
 * @param {string} prompt Prompt to sanitize
 * @returns {string} Sanitized prompt
 */
function sanitizeHordeImagePrompt(prompt) {
    if (!prompt) {
        return '';
    }

    // To avoid flagging from some image models, always swap these words
    prompt = prompt.replace(/\b(girl)\b/gmi, 'woman');
    prompt = prompt.replace(/\b(boy)\b/gmi, 'man');
    prompt = prompt.replace(/\b(girls)\b/gmi, 'women');
    prompt = prompt.replace(/\b(boys)\b/gmi, 'men');
    // Always remove these high risk words from the prompt, as they add little value to image gen while increasing the risk the prompt gets flagged
    prompt = prompt.replace(/\b(under.age|under.aged|underage|underaged|loli|pedo|pedophile|(\w+).year.old|(\w+).years.old|minor|prepubescent|minors|shota)\b/gmi, '');
    // Replace risky subject nouns with person
    prompt = prompt.replace(/\b(youngster|infant|baby|toddler|child|teen|kid|kiddie|kiddo|teenager|student|preteen|pre.teen)\b/gmi, 'person');
    // Remove risky adjectives and related words
    prompt = prompt.replace(/\b(young|younger|youthful|youth|small|smaller|smallest|girly|boyish|lil|tiny|teenaged|lit[tl]le|school.aged|school|highschool|kindergarten|teens|children|kids)\b/gmi, '');

    return prompt;
}

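// Illustrative behavior (made-up input, not in the diff): the swaps and
// removals above turn
//   sanitizeHordeImagePrompt('a young girl in a school uniform')
// into 'a  woman in a  uniform' (removed words leave their spaces behind).
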
router.post('/text-workers', async (request, response) => {
    try {
        const cachedWorkers = cache.get('workers');

        if (cachedWorkers && !request.body.force) {
            return response.send(cachedWorkers);
        }

        const agent = await getClientAgent();
        const fetchResult = await fetch('https://aihorde.net/api/v2/workers?type=text', {
            headers: {
                'Client-Agent': agent,
            },
        });
        const data = await fetchResult.json();
        cache.set('workers', data);
        return response.send(data);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

/**
 * Fetches the community-maintained text model reference from GitHub.
 * @returns {Promise<any>} Model metadata keyed by model name
 */
async function getHordeTextModelMetadata() {
    const response = await fetch(HORDE_TEXT_MODEL_METADATA_URL);
    return await response.json();
}

/**
 * Merges Horde model listings with the reference metadata, flagging models found in the reference as whitelisted.
 * @param {object[]} models Models reported by the Horde
 * @param {object} metadata Model reference keyed by model name
 * @returns {Promise<object[]>} Merged model list
 */
async function mergeModelsAndMetadata(models, metadata) {
    return models.map(model => {
        const metadataModel = metadata[model.name];
        if (!metadataModel) {
            return { ...model, is_whitelisted: false };
        }
        return { ...model, ...metadataModel, is_whitelisted: true };
    });
}

router.post('/text-models', async (request, response) => {
    try {
        const cachedModels = cache.get('models');
        if (cachedModels && !request.body.force) {
            return response.send(cachedModels);
        }

        const agent = await getClientAgent();
        const fetchResult = await fetch('https://aihorde.net/api/v2/status/models?type=text', {
            headers: {
                'Client-Agent': agent,
            },
        });

        let data = await fetchResult.json();

        // Attempt to fetch and merge models metadata
        try {
            const metadata = await getHordeTextModelMetadata();
            data = await mergeModelsAndMetadata(data, metadata);
        }
        catch (error) {
            console.error('Failed to fetch metadata:', error);
        }

        cache.set('models', data);
        return response.send(data);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/status', async (_, response) => {
    try {
        const agent = await getClientAgent();
        const fetchResult = await fetch('https://aihorde.net/api/v2/status/heartbeat', {
            headers: {
                'Client-Agent': agent,
            },
        });

        return response.send({ ok: fetchResult.ok });
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/cancel-task', async (request, response) => {
    try {
        const taskId = request.body.taskId;
        const agent = await getClientAgent();
        const fetchResult = await fetch(`https://aihorde.net/api/v2/generate/text/status/${taskId}`, {
            method: 'DELETE',
            headers: {
                'Client-Agent': agent,
            },
        });

        const data = await fetchResult.json();
        console.info(`Cancelled Horde task ${taskId}`);
        return response.send(data);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/task-status', async (request, response) => {
    try {
        const taskId = request.body.taskId;
        const agent = await getClientAgent();
        const fetchResult = await fetch(`https://aihorde.net/api/v2/generate/text/status/${taskId}`, {
            headers: {
                'Client-Agent': agent,
            },
        });

        const data = await fetchResult.json();
        console.info(`Horde task ${taskId} status:`, data);
        return response.send(data);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/generate-text', async (request, response) => {
    const apiKey = readSecret(request.user.directories, SECRET_KEYS.HORDE) || ANONYMOUS_KEY;
    const url = 'https://aihorde.net/api/v2/generate/text/async';
    const agent = await getClientAgent();

    console.debug(request.body);
    try {
        const result = await fetch(url, {
            method: 'POST',
            body: JSON.stringify(request.body),
            headers: {
                'Content-Type': 'application/json',
                'apikey': apiKey,
                'Client-Agent': agent,
            },
        });

        if (!result.ok) {
            const message = await result.text();
            console.error('Horde returned an error:', message);
            return response.send({ error: { message } });
        }

        const data = await result.json();
        return response.send(data);
    } catch (error) {
        console.error(error);
        return response.send({ error: true });
    }
});

router.post('/sd-samplers', async (_, response) => {
    try {
        const samplers = Object.values(ModelGenerationInputStableSamplers);
        response.send(samplers);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/sd-models', async (_, response) => {
    try {
        const ai_horde = await getHordeClient();
        const models = await ai_horde.getModels();
        response.send(models);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/caption-image', async (request, response) => {
    try {
        const api_key_horde = readSecret(request.user.directories, SECRET_KEYS.HORDE) || ANONYMOUS_KEY;
        const ai_horde = await getHordeClient();
        const result = await ai_horde.postAsyncInterrogate({
            source_image: request.body.image,
            forms: [{ name: ModelInterrogationFormTypes.caption }],
        }, { token: api_key_horde });

        if (!result.id) {
            console.error('Image interrogation request is not satisfiable:', result.message || 'unknown error');
            return response.sendStatus(400);
        }

        const MAX_ATTEMPTS = 200;
        const CHECK_INTERVAL = 3000;

        for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
            await delay(CHECK_INTERVAL);
            const status = await ai_horde.getInterrogationStatus(result.id);
            console.info(status);

            if (status.state === HordeAsyncRequestStates.done) {

                if (status.forms === undefined) {
                    console.error('Image interrogation request failed: no forms found.');
                    return response.sendStatus(500);
                }

                console.debug('Image interrogation result:', status);
                const caption = status?.forms[0]?.result?.caption || '';

                if (!caption) {
                    console.error('Image interrogation request failed: no caption found.');
                    return response.sendStatus(500);
                }

                return response.send({ caption });
            }

            if (status.state === HordeAsyncRequestStates.faulted || status.state === HordeAsyncRequestStates.cancelled) {
                console.error('Image interrogation request is not successful.');
                return response.sendStatus(503);
            }
        }

        // All attempts exhausted without a terminal state; report a timeout.
        return response.sendStatus(504);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/user-info', async (request, response) => {
    const api_key_horde = readSecret(request.user.directories, SECRET_KEYS.HORDE);

    if (!api_key_horde) {
        return response.send({ anonymous: true });
    }

    try {
        const ai_horde = await getHordeClient();
        const sharedKey = await (async () => {
            try {
                return await ai_horde.getSharedKey(api_key_horde);
            } catch {
                return null;
            }
        })();
        const user = await ai_horde.findUser({ token: api_key_horde });
        return response.send({ user, sharedKey, anonymous: false });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/generate-image', async (request, response) => {
    if (!request.body.prompt) {
        return response.sendStatus(400);
    }

    const MAX_ATTEMPTS = 200;
    const CHECK_INTERVAL = 3000;
    const PROMPT_THRESHOLD = 5000;

    try {
        const maxLength = PROMPT_THRESHOLD - String(request.body.negative_prompt).length - 5;
        if (String(request.body.prompt).length > maxLength) {
            console.warn('Stable Horde prompt is too long, truncating...');
            request.body.prompt = String(request.body.prompt).substring(0, maxLength);
        }

        // Sanitize prompt if requested
        if (request.body.sanitize) {
            const sanitized = sanitizeHordeImagePrompt(request.body.prompt);

            if (request.body.prompt !== sanitized) {
                console.info('Stable Horde prompt was sanitized.');
            }

            request.body.prompt = sanitized;
        }

        const api_key_horde = readSecret(request.user.directories, SECRET_KEYS.HORDE) || ANONYMOUS_KEY;
        console.debug('Stable Horde request:', request.body);

        const ai_horde = await getHordeClient();
        // noinspection JSCheckFunctionSignatures -- see @ts-ignore - use_gfpgan
        const generation = await ai_horde.postAsyncImageGenerate(
            {
                prompt: `${request.body.prompt} ### ${request.body.negative_prompt}`,
                params:
                {
                    sampler_name: request.body.sampler,
                    hires_fix: request.body.enable_hr,
                    // @ts-ignore - use_gfpgan param is not in the type definition, need to update to new ai_horde @ https://github.com/ZeldaFan0225/ai_horde/blob/main/index.ts
                    use_gfpgan: request.body.restore_faces,
                    cfg_scale: request.body.scale,
                    steps: request.body.steps,
                    width: request.body.width,
                    height: request.body.height,
                    karras: Boolean(request.body.karras),
                    clip_skip: request.body.clip_skip,
                    seed: request.body.seed >= 0 ? String(request.body.seed) : undefined,
                    n: 1,
                },
                r2: false,
                nsfw: request.body.nfsw, // (sic) 'nfsw' mirrors the misspelled key name in the client payload
                models: [request.body.model],
            },
            { token: api_key_horde });

        if (!generation.id) {
            console.warn('Image generation request is not satisfiable:', generation.message || 'unknown error');
            return response.sendStatus(400);
        }

        console.info('Horde image generation request:', generation);

        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            console.warn('Horde image generation request aborted.');
            controller.abort();
            if (generation.id) ai_horde.deleteImageGenerationRequest(generation.id);
        });

        for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
            controller.signal.throwIfAborted();
            await delay(CHECK_INTERVAL);
            const check = await ai_horde.getImageGenerationCheck(generation.id);
            console.info(check);

            if (check.done) {
                const result = await ai_horde.getImageGenerationStatus(generation.id);
                if (result.generations === undefined) return response.sendStatus(500);
                return response.send(result.generations[0].img);
            }

            /*
            if (!check.is_possible) {
                return response.sendStatus(503);
            }
            */

            if (check.faulted) {
                return response.sendStatus(500);
            }
        }

        return response.sendStatus(504);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

155
web-app/src/endpoints/images.js
Normal file
@@ -0,0 +1,155 @@
import fs from 'node:fs';
import path from 'node:path';
import { Buffer } from 'node:buffer';

import express from 'express';
import sanitize from 'sanitize-filename';

import { clientRelativePath, removeFileExtension, getImages, isPathUnderParent } from '../util.js';
import { MEDIA_EXTENSIONS, MEDIA_REQUEST_TYPE } from '../constants.js';

/**
 * Ensures the directory for the provided file path exists.
 * If not, it recursively creates the directory.
 *
 * @param {string} filePath - The full path of the file for which the directory should be ensured.
 */
function ensureDirectoryExistence(filePath) {
    const dirname = path.dirname(filePath);
    if (fs.existsSync(dirname)) {
        return true;
    }
    ensureDirectoryExistence(dirname);
    fs.mkdirSync(dirname);
}

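// Note: fs.mkdirSync(dirname, { recursive: true }) would achieve the same
// result as the manual recursion above in a single call.
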
export const router = express.Router();

/**
 * Endpoint to handle image uploads.
 * The image should be provided in the request body in base64 format.
 * Optionally, a character name can be provided to save the image in a sub-folder.
 *
 * @route POST /api/images/upload
 * @param {Object} request.body - The request payload.
 * @param {string} request.body.image - The base64 encoded image data.
 * @param {string} [request.body.ch_name] - Optional character name to determine the sub-directory.
 * @returns {Object} response - The response object containing the path where the image was saved.
 */
router.post('/upload', async (request, response) => {
    try {
        if (!request.body) {
            return response.status(400).send({ error: 'No data provided' });
        }

        const { image, format } = request.body;

        if (!image) {
            return response.status(400).send({ error: 'No image data provided' });
        }

        const validFormat = MEDIA_EXTENSIONS.includes(format);
        if (!validFormat) {
            return response.status(400).send({ error: 'Invalid image format' });
        }

        // Construct the filename and path
        let filename;
        if (request.body.filename) {
            filename = `${removeFileExtension(request.body.filename)}.${format}`;
        } else {
            filename = `${Date.now()}.${format}`;
        }

        // If a character is defined, save to a sub-folder for that character
        let pathToNewFile = path.join(request.user.directories.userImages, sanitize(filename));
        if (request.body.ch_name) {
            pathToNewFile = path.join(request.user.directories.userImages, sanitize(request.body.ch_name), sanitize(filename));
        }

        ensureDirectoryExistence(pathToNewFile);
        const imageBuffer = Buffer.from(image, 'base64');
        await fs.promises.writeFile(pathToNewFile, new Uint8Array(imageBuffer));
        response.send({ path: clientRelativePath(request.user.directories.root, pathToNewFile) });
    } catch (error) {
        console.error(error);
        response.status(500).send({ error: 'Failed to save the image' });
    }
});

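/**
 * Lists media files in a folder under the user's images directory,
 * sorted by the requested field and order. Creates the folder if missing.
 */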
router.post('/list/:folder?', (request, response) => {
    try {
        if (request.params.folder) {
            if (request.body.folder) {
                return response.status(400).send({ error: 'Folder specified in both URL and body' });
            }

            console.warn('Deprecated: Use POST /api/images/list with folder in request body');
            request.body.folder = request.params.folder;
        }

        if (!request.body.folder) {
            return response.status(400).send({ error: 'No folder specified' });
        }

        const directoryPath = path.join(request.user.directories.userImages, sanitize(request.body.folder));
        const type = Number(request.body.type ?? MEDIA_REQUEST_TYPE.IMAGE);
        const sort = request.body.sortField || 'date';
        const order = request.body.sortOrder || 'asc';

        if (!fs.existsSync(directoryPath)) {
            fs.mkdirSync(directoryPath, { recursive: true });
        }

        const images = getImages(directoryPath, sort, type);
        if (order === 'desc') {
            images.reverse();
        }
        return response.send(images);
    } catch (error) {
        console.error(error);
        return response.status(500).send({ error: 'Unable to retrieve files' });
    }
});

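/**
 * Returns the names of all sub-folders in the user's images directory.
 */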
router.post('/folders', (request, response) => {
    try {
        const directoryPath = request.user.directories.userImages;
        if (!fs.existsSync(directoryPath)) {
            fs.mkdirSync(directoryPath, { recursive: true });
        }

        const folders = fs.readdirSync(directoryPath, { withFileTypes: true })
            .filter(dirent => dirent.isDirectory())
            .map(dirent => dirent.name);

        return response.send(folders);
    } catch (error) {
        console.error(error);
        return response.status(500).send({ error: 'Unable to retrieve folders' });
    }
});

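/**
 * Deletes an image by its client-relative path, after verifying the resolved
 * path stays inside the user's images directory (path traversal guard).
 */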
router.post('/delete', async (request, response) => {
    try {
        if (!request.body.path) {
            return response.status(400).send('No path specified');
        }

        const pathToDelete = path.join(request.user.directories.root, request.body.path);
        if (!isPathUnderParent(request.user.directories.userImages, pathToDelete)) {
            return response.status(400).send('Invalid path');
        }

        if (!fs.existsSync(pathToDelete)) {
            return response.status(404).send('File not found');
        }

        fs.unlinkSync(pathToDelete);
        console.info(`Deleted image: ${request.body.path} from ${request.user.profile.handle}`);
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

230
web-app/src/endpoints/minimax.js
Normal file
@@ -0,0 +1,230 @@
import { Buffer } from 'node:buffer';

import express from 'express';
import fetch from 'node-fetch';

import { readSecret, SECRET_KEYS } from './secrets.js';

export const router = express.Router();

// Audio format MIME type mapping
const getAudioMimeType = (format) => {
    const mimeTypes = {
        'mp3': 'audio/mpeg',
        'wav': 'audio/wav',
        'pcm': 'audio/pcm',
        'flac': 'audio/flac',
        'aac': 'audio/aac',
    };
    return mimeTypes[format] || 'audio/mpeg';
};

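/**
 * Proxies a text-to-speech request to the MiniMax T2A v2 API and sends the
 * resulting audio back to the client. Handles both hex-encoded audio payloads
 * and URL-based audio responses.
 */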
router.post('/generate-voice', async (request, response) => {
    try {
        const {
            text,
            voiceId,
            apiHost = 'https://api.minimax.io',
            model = 'speech-02-hd',
            speed = 1.0,
            volume = 1.0,
            pitch = 1.0,
            audioSampleRate = 32000,
            bitrate = 128000,
            format = 'mp3',
            language,
        } = request.body;

        const apiKey = readSecret(request.user.directories, SECRET_KEYS.MINIMAX);
        const groupId = readSecret(request.user.directories, SECRET_KEYS.MINIMAX_GROUP_ID);

        // Validate required parameters
        if (!text || !voiceId || !apiKey || !groupId) {
            console.warn('MiniMax TTS: Missing required parameters');
            return response.status(400).json({ error: 'Missing required parameters: text, voiceId, apiKey, and groupId are required' });
        }

        const requestBody = {
            model: model,
            text: text,
            stream: false,
            voice_setting: {
                voice_id: voiceId,
                speed: Number(speed),
                vol: Number(volume),
                pitch: Number(pitch),
            },
            audio_setting: {
                sample_rate: Number(audioSampleRate),
                bitrate: Number(bitrate),
                format: format,
                channel: 1,
            },
        };

        // Add the language parameter if provided
        if (language) {
            requestBody.lang = language;
        }

        const apiUrl = `${apiHost}/v1/t2a_v2?GroupId=${groupId}`;

        console.debug('MiniMax TTS Request:', {
            url: apiUrl,
            body: { ...requestBody, voice_setting: { ...requestBody.voice_setting, voice_id: '[REDACTED]' } },
        });

        const apiResponse = await fetch(apiUrl, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${apiKey}`,
                'Content-Type': 'application/json',
                'MM-API-Source': 'SillyTavern-TTS',
            },
            body: JSON.stringify(requestBody),
        });

        if (!apiResponse.ok) {
            let errorMessage = `HTTP ${apiResponse.status}`;

            try {
                // Try to parse a JSON error response
                /** @type {any} */
                const errorData = await apiResponse.json();
                console.error('MiniMax TTS API error (JSON):', errorData);

                // Check for the MiniMax-specific error format
                const baseResp = errorData?.base_resp;
                if (baseResp && baseResp.status_code !== 0) {
                    if (baseResp.status_code === 1004) {
                        errorMessage = 'Authentication failed - Please check your API key and API host';
                    } else {
                        errorMessage = `API Error: ${baseResp.status_msg}`;
                    }
                } else {
                    errorMessage = errorData.error?.message || errorData.message || errorData.detail || `HTTP ${apiResponse.status}`;
                }
            } catch (jsonError) {
                // If not JSON, try to read plain text
                try {
                    const errorText = await apiResponse.text();
                    console.error('MiniMax TTS API error (Text):', errorText);
                    if (errorText && errorText.length > 500) {
                        errorMessage = `HTTP ${apiResponse.status}: Response too large (${errorText.length} characters)`;
                    } else {
                        errorMessage = errorText || `HTTP ${apiResponse.status}`;
                    }
                } catch (textError) {
                    console.error('MiniMax TTS: Failed to read error response:', textError);
                    errorMessage = `HTTP ${apiResponse.status}: Unable to read error details`;
                }
            }

            console.error('MiniMax TTS API request failed:', errorMessage);
            return response.status(500).json({ error: errorMessage });
        }

        // Parse the response
        /** @type {any} */
        let responseData;
        try {
            responseData = await apiResponse.json();
            console.debug('MiniMax TTS Response received');
        } catch (jsonError) {
            console.error('MiniMax TTS: Failed to parse response as JSON:', jsonError);
            return response.status(500).json({ error: 'Invalid response format from MiniMax API' });
        }

        // Check for API error codes in the response data
        const baseResp = responseData?.base_resp;
        if (baseResp && baseResp.status_code !== 0) {
            let errorMessage;
            if (baseResp.status_code === 1004) {
                errorMessage = 'Authentication failed - Please check your API key and API host';
            } else {
                errorMessage = `API Error: ${baseResp.status_msg}`;
            }
            console.error('MiniMax TTS API error:', baseResp);
            return response.status(500).json({ error: errorMessage });
        }

        // Process the audio data
        if (responseData.data && responseData.data.audio) {
            // Process hex-encoded audio data
            const hexAudio = responseData.data.audio;

            if (!hexAudio || typeof hexAudio !== 'string') {
                console.error('MiniMax TTS: Invalid audio data format');
                return response.status(500).json({ error: 'Invalid audio data format' });
            }

            // Remove a possible prefix and any whitespace
            const cleanHex = hexAudio.replace(/^0x/, '').replace(/\s/g, '');

            // Validate the hex string format
            if (!/^[0-9a-fA-F]*$/.test(cleanHex)) {
                console.error('MiniMax TTS: Invalid hex string format');
                return response.status(500).json({ error: 'Invalid audio data format' });
            }

            // Ensure the hex string length is even
            const paddedHex = cleanHex.length % 2 === 0 ? cleanHex : '0' + cleanHex;

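            // Convert the hex string to raw bytes. For example, '48656c6c6f'
            // decodes to the 5 bytes of 'Hello'. Node's Buffer.from(paddedHex, 'hex')
            // would perform the same conversion in one call.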
            try {
                // Convert the hex string to a byte array
                const hexMatches = paddedHex.match(/.{1,2}/g);
                if (!hexMatches) {
                    console.error('MiniMax TTS: Failed to parse hex string');
                    return response.status(500).json({ error: 'Invalid hex string format' });
                }
                const audioBytes = new Uint8Array(hexMatches.map(byte => parseInt(byte, 16)));

                if (audioBytes.length === 0) {
                    console.error('MiniMax TTS: Audio conversion resulted in an empty array');
                    return response.status(500).json({ error: 'Audio data conversion failed' });
                }

                console.debug(`MiniMax TTS: Converted ${paddedHex.length} hex characters to ${audioBytes.length} bytes`);

                // Set appropriate headers and send the audio data
                const mimeType = getAudioMimeType(format);
                response.setHeader('Content-Type', mimeType);
                response.setHeader('Content-Length', audioBytes.length);

                return response.send(Buffer.from(audioBytes));
            } catch (conversionError) {
                console.error('MiniMax TTS: Audio conversion error:', conversionError);
                return response.status(500).json({ error: `Audio data conversion failed: ${conversionError.message}` });
            }
        } else if (responseData.data && responseData.data.url) {
            // Handle a URL-based audio response
            console.debug('MiniMax TTS: Received audio URL:', responseData.data.url);

            try {
                const audioResponse = await fetch(responseData.data.url);
                if (!audioResponse.ok) {
                    console.error('MiniMax TTS: Failed to fetch audio from URL:', audioResponse.status);
                    return response.status(500).json({ error: `Failed to fetch audio from URL: ${audioResponse.status}` });
                }

                const audioBuffer = await audioResponse.arrayBuffer();
                const mimeType = getAudioMimeType(format);

                response.setHeader('Content-Type', mimeType);
                response.setHeader('Content-Length', audioBuffer.byteLength);

                return response.send(Buffer.from(audioBuffer));
            } catch (urlError) {
                console.error('MiniMax TTS: Error fetching audio from URL:', urlError);
                return response.status(500).json({ error: `Failed to fetch audio: ${urlError.message}` });
            }
        } else {
            // Handle a response with no usable audio data
            const errorMessage = responseData.base_resp?.status_msg || responseData.error?.message || 'Unknown error';
            console.error('MiniMax TTS: No valid audio data in response:', responseData);
            return response.status(500).json({ error: `API Error: ${errorMessage}` });
        }
    } catch (error) {
        console.error('MiniMax TTS generation failed:', error);
        return response.status(500).json({ error: 'Internal server error' });
    }
});

17
web-app/src/endpoints/moving-ui.js
Normal file
@@ -0,0 +1,17 @@
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

export const router = express.Router();

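/**
 * Saves a MovingUI preset as JSON under the user's movingUI directory.
 * The preset name doubles as the (sanitized) file name.
 */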
router.post('/save', (request, response) => {
    if (!request.body || !request.body.name) {
        return response.sendStatus(400);
    }

    const filename = path.join(request.user.directories.movingUI, sanitize(`${request.body.name}.json`));
    writeFileAtomicSync(filename, JSON.stringify(request.body, null, 4), 'utf8');

    return response.sendStatus(200);
});

484
web-app/src/endpoints/novelai.js
Normal file
@@ -0,0 +1,484 @@
import util from 'node:util';
import { Buffer } from 'node:buffer';

import fetch from 'node-fetch';
import express from 'express';

import { readSecret, SECRET_KEYS } from './secrets.js';
import { readAllChunks, extractFileFromZipBuffer, forwardFetchResponse } from '../util.js';

const API_NOVELAI = 'https://api.novelai.net';
const TEXT_NOVELAI = 'https://text.novelai.net';
const IMAGE_NOVELAI = 'https://image.novelai.net';

// Constants for the skip_cfg_above_sigma (Variety+) calculation
const REFERENCE_PIXEL_COUNT = 1011712; // 832 * 1216 reference image size
const SIGMA_MAGIC_NUMBER = 19; // Base sigma multiplier for V3 and V4 models
const SIGMA_MAGIC_NUMBER_V4_5 = 58; // Base sigma multiplier for V4.5 models

// Ban bracket generation, plus defaults
const badWordsList = [
    [3], [49356], [1431], [31715], [34387], [20765], [30702], [10691], [49333], [1266],
    [19438], [43145], [26523], [41471], [2936], [85, 85], [49332], [7286], [1115], [24],
];

const eratoBadWordsList = [
    [16067], [933, 11144], [25106, 11144], [58, 106901, 16073, 33710, 25, 109933],
    [933, 58, 11144], [128030], [58, 30591, 33503, 17663, 100204, 25, 11144],
];

const hypeBotBadWordsList = [
    [58], [60], [90], [92], [685], [1391], [1782], [2361], [3693], [4083], [4357], [4895],
    [5512], [5974], [7131], [8183], [8351], [8762], [8964], [8973], [9063], [11208],
    [11709], [11907], [11919], [12878], [12962], [13018], [13412], [14631], [14692],
    [14980], [15090], [15437], [16151], [16410], [16589], [17241], [17414], [17635],
    [17816], [17912], [18083], [18161], [18477], [19629], [19779], [19953], [20520],
    [20598], [20662], [20740], [21476], [21737], [22133], [22241], [22345], [22935],
    [23330], [23785], [23834], [23884], [25295], [25597], [25719], [25787], [25915],
    [26076], [26358], [26398], [26894], [26933], [27007], [27422], [28013], [29164],
    [29225], [29342], [29565], [29795], [30072], [30109], [30138], [30866], [31161],
    [31478], [32092], [32239], [32509], [33116], [33250], [33761], [34171], [34758],
    [34949], [35944], [36338], [36463], [36563], [36786], [36796], [36937], [37250],
    [37913], [37981], [38165], [38362], [38381], [38430], [38892], [39850], [39893],
    [41832], [41888], [42535], [42669], [42785], [42924], [43839], [44438], [44587],
    [44926], [45144], [45297], [46110], [46570], [46581], [46956], [47175], [47182],
    [47527], [47715], [48600], [48683], [48688], [48874], [48999], [49074], [49082],
    [49146], [49946], [10221], [4841], [1427], [2602, 834], [29343], [37405], [35780], [2602], [50256],
];

// Used for the phrase repetition penalty
const repPenaltyAllowList = [
    [49256, 49264, 49231, 49230, 49287, 85, 49255, 49399, 49262, 336, 333, 432, 363, 468, 492, 745, 401, 426, 623, 794,
        1096, 2919, 2072, 7379, 1259, 2110, 620, 526, 487, 16562, 603, 805, 761, 2681, 942, 8917, 653, 3513, 506, 5301,
        562, 5010, 614, 10942, 539, 2976, 462, 5189, 567, 2032, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 588,
        803, 1040, 49209, 4, 5, 6, 7, 8, 9, 10, 11, 12],
];

const eratoRepPenWhitelist = [
    6, 1, 11, 13, 25, 198, 12, 9, 8, 279, 264, 459, 323, 477, 539, 912, 374, 574, 1051, 1550, 1587, 4536, 5828, 15058,
    3287, 3250, 1461, 1077, 813, 11074, 872, 1202, 1436, 7846, 1288, 13434, 1053, 8434, 617, 9167, 1047, 19117, 706,
    12775, 649, 4250, 527, 7784, 690, 2834, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 1210, 1359, 608, 220, 596, 956,
    3077, 44886, 4265, 3358, 2351, 2846, 311, 389, 315, 304, 520, 505, 430,
];

// Ban the dinkus and asterism
const logitBiasExp = [
    { 'sequence': [23], 'bias': -0.08, 'ensure_sequence_finish': false, 'generate_once': false },
    { 'sequence': [21], 'bias': -0.08, 'ensure_sequence_finish': false, 'generate_once': false },
];

const eratoLogitBiasExp = [
    { 'sequence': [12488], 'bias': -0.08, 'ensure_sequence_finish': false, 'generate_once': false },
    { 'sequence': [128041], 'bias': -0.08, 'ensure_sequence_finish': false, 'generate_once': false },
];

function getBadWordsList(model) {
    let list = [];

    if (model.includes('hypebot')) {
        list = hypeBotBadWordsList;
    }

    if (model.includes('clio') || model.includes('kayra')) {
        list = badWordsList;
    }

    if (model.includes('erato')) {
        list = eratoBadWordsList;
    }

    // Clone the list so we don't modify the original
    return list.slice();
}

function getLogitBiasList(model) {
    let list = [];

    if (model.includes('erato')) {
        list = eratoLogitBiasExp;
    }

    if (model.includes('clio') || model.includes('kayra')) {
        list = logitBiasExp;
    }

    return list.slice();
}

function getRepPenaltyWhitelist(model) {
    if (model.includes('clio') || model.includes('kayra')) {
        return repPenaltyAllowList.flat();
    }

    if (model.includes('erato')) {
        return eratoRepPenWhitelist.flat();
    }

    return null;
}

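/**
 * Computes the skip_cfg_above_sigma ("Variety+") value by scaling a base sigma
 * with the square root of the requested pixel count relative to the 832x1216
 * reference. For example, a 1024x1024 request on a V3/V4 model yields
 * sqrt(1048576 / 1011712) * 19, approximately 19.34.
 */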
function calculateSkipCfgAboveSigma(width, height, modelName) {
    const magicConstant = modelName?.includes('nai-diffusion-4-5')
        ? SIGMA_MAGIC_NUMBER_V4_5
        : SIGMA_MAGIC_NUMBER;

    const pixelCount = width * height;
    const ratio = pixelCount / REFERENCE_PIXEL_COUNT;

    return Math.pow(ratio, 0.5) * magicConstant;
}

export const router = express.Router();

router.post('/status', async function (req, res) {
    if (!req.body) return res.sendStatus(400);
    const api_key_novel = readSecret(req.user.directories, SECRET_KEYS.NOVEL);

    if (!api_key_novel) {
        console.warn('NovelAI Access Token is missing.');
        return res.sendStatus(400);
    }

    try {
        const response = await fetch(API_NOVELAI + '/user/subscription', {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + api_key_novel,
            },
        });

        if (response.ok) {
            const data = await response.json();
            return res.send(data);
        } else if (response.status == 401) {
            console.error('NovelAI Access Token is incorrect.');
            return res.send({ error: true });
        } else {
            console.warn('NovelAI returned an error:', response.statusText);
            return res.send({ error: true });
        }
    } catch (error) {
        console.error(error);
        return res.send({ error: true });
    }
});

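/**
 * Text generation endpoint. Builds the NovelAI request from the client
 * parameters plus per-model bad-word, logit-bias, and repetition-penalty
 * defaults, then either forwards the SSE stream or returns the JSON result.
 */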
router.post('/generate', async function (req, res) {
    if (!req.body) return res.sendStatus(400);

    const api_key_novel = readSecret(req.user.directories, SECRET_KEYS.NOVEL);

    if (!api_key_novel) {
        console.warn('NovelAI Access Token is missing.');
        return res.sendStatus(400);
    }

    const controller = new AbortController();
    req.socket.removeAllListeners('close');
    req.socket.on('close', function () {
        controller.abort();
    });

    // Add customized bad words for Clio, Kayra, and Erato
    const badWordsList = getBadWordsList(req.body.model);

    if (Array.isArray(badWordsList) && Array.isArray(req.body.bad_words_ids)) {
        for (const badWord of req.body.bad_words_ids) {
            if (Array.isArray(badWord) && badWord.every(x => Number.isInteger(x))) {
                badWordsList.push(badWord);
            }
        }
    }

    // Remove empty arrays from the bad words list
    for (const badWord of badWordsList) {
        if (badWord.length === 0) {
            badWordsList.splice(badWordsList.indexOf(badWord), 1);
        }
    }

    // Add default biases for the dinkus and asterism
    const logitBiasList = getLogitBiasList(req.body.model);

    if (Array.isArray(logitBiasList) && Array.isArray(req.body.logit_bias_exp)) {
        logitBiasList.push(...req.body.logit_bias_exp);
    }

    const repPenWhitelist = getRepPenaltyWhitelist(req.body.model);

    const data = {
        'input': req.body.input,
        'model': req.body.model,
        'parameters': {
            'use_string': req.body.use_string ?? true,
            'temperature': req.body.temperature,
            'max_length': req.body.max_length,
            'min_length': req.body.min_length,
            'tail_free_sampling': req.body.tail_free_sampling,
            'repetition_penalty': req.body.repetition_penalty,
            'repetition_penalty_range': req.body.repetition_penalty_range,
            'repetition_penalty_slope': req.body.repetition_penalty_slope,
            'repetition_penalty_frequency': req.body.repetition_penalty_frequency,
            'repetition_penalty_presence': req.body.repetition_penalty_presence,
            'repetition_penalty_whitelist': repPenWhitelist,
            'top_a': req.body.top_a,
            'top_p': req.body.top_p,
            'top_k': req.body.top_k,
            'typical_p': req.body.typical_p,
            'mirostat_lr': req.body.mirostat_lr,
            'mirostat_tau': req.body.mirostat_tau,
            'phrase_rep_pen': req.body.phrase_rep_pen,
            'stop_sequences': req.body.stop_sequences,
            'bad_words_ids': badWordsList.length ? badWordsList : null,
            'logit_bias_exp': logitBiasList,
            'generate_until_sentence': req.body.generate_until_sentence,
            'use_cache': req.body.use_cache,
            'return_full_text': req.body.return_full_text,
            'prefix': req.body.prefix,
            'order': req.body.order,
            'num_logprobs': req.body.num_logprobs,
            'min_p': req.body.min_p,
            'math1_temp': req.body.math1_temp,
            'math1_quad': req.body.math1_quad,
            'math1_quad_entropy_scale': req.body.math1_quad_entropy_scale,
        },
    };

    // Tells the model to stop generation at '>'
    if ('theme_textadventure' === req.body.prefix) {
        if (req.body.model.includes('clio') || req.body.model.includes('kayra')) {
            data.parameters.eos_token_id = 49405;
        }
        if (req.body.model.includes('erato')) {
            data.parameters.eos_token_id = 29;
        }
    }

    console.debug(util.inspect(data, { depth: 4 }));

    const args = {
        body: JSON.stringify(data),
        headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + api_key_novel },
        signal: controller.signal,
    };

    try {
        const baseURL = (req.body.model.includes('kayra') || req.body.model.includes('erato')) ? TEXT_NOVELAI : API_NOVELAI;
        const url = req.body.streaming ? `${baseURL}/ai/generate-stream` : `${baseURL}/ai/generate`;
        const response = await fetch(url, { method: 'POST', ...args });

        if (req.body.streaming) {
            // Pipe the remote SSE stream to the Express response
            forwardFetchResponse(response, res);
        } else {
            if (!response.ok) {
                const text = await response.text();
                let message = text;
                console.warn(`Novel API returned error: ${response.status} ${response.statusText} ${text}`);

                try {
                    const data = JSON.parse(text);
                    message = data.message;
                } catch {
                    // ignore
                }

                return res.status(500).send({ error: { message } });
            }

            /** @type {any} */
            const data = await response.json();
            console.info('NovelAI Output', data?.output);
            return res.send(data);
        }
    } catch (error) {
        // Log the failure instead of swallowing it silently
        console.error(error);
        return res.send({ error: true });
    }
});

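/**
 * Image generation endpoint (NAI Diffusion). The API returns a ZIP archive;
 * the PNG is extracted and sent back as base64, optionally after a second
 * round-trip through the /ai/upscale endpoint.
 */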
router.post('/generate-image', async (request, response) => {
    if (!request.body) {
        return response.sendStatus(400);
    }

    const key = readSecret(request.user.directories, SECRET_KEYS.NOVEL);

    if (!key) {
        console.warn('NovelAI Access Token is missing.');
        return response.sendStatus(400);
    }

    try {
        console.debug('NAI Diffusion request:', request.body);
        const generateUrl = `${IMAGE_NOVELAI}/ai/generate-image`;
        const generateResult = await fetch(generateUrl, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${key}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                action: 'generate',
                input: request.body.prompt ?? '',
                model: request.body.model ?? 'nai-diffusion',
                parameters: {
                    params_version: 3,
                    prefer_brownian: true,
                    negative_prompt: request.body.negative_prompt ?? '',
                    height: request.body.height ?? 512,
                    width: request.body.width ?? 512,
                    scale: request.body.scale ?? 9,
                    seed: request.body.seed >= 0 ? request.body.seed : Math.floor(Math.random() * 9999999999),
                    sampler: request.body.sampler ?? 'k_dpmpp_2m',
                    noise_schedule: request.body.scheduler ?? 'karras',
                    steps: request.body.steps ?? 28,
                    n_samples: 1,
                    // NAI handholding for prompts
                    ucPreset: 0,
                    qualityToggle: false,
                    add_original_image: false,
                    controlnet_strength: 1,
                    deliberate_euler_ancestral_bug: false,
                    dynamic_thresholding: request.body.decrisper ?? false,
                    legacy: false,
                    legacy_v3_extend: false,
                    sm: request.body.sm ?? false,
                    sm_dyn: request.body.sm_dyn ?? false,
                    uncond_scale: 1,
                    skip_cfg_above_sigma: request.body.variety_boost
                        ? calculateSkipCfgAboveSigma(
                            request.body.width ?? 512,
                            request.body.height ?? 512,
                            request.body.model ?? 'nai-diffusion',
                        )
                        : null,
                    use_coords: false,
                    characterPrompts: [],
                    reference_image_multiple: [],
                    reference_information_extracted_multiple: [],
                    reference_strength_multiple: [],
                    v4_negative_prompt: {
                        caption: {
                            base_caption: request.body.negative_prompt ?? '',
                            char_captions: [],
                        },
                    },
                    v4_prompt: {
                        caption: {
                            base_caption: request.body.prompt ?? '',
                            char_captions: [],
                        },
                        use_coords: false,
                        use_order: true,
                    },
                },
            }),
        });

        if (!generateResult.ok) {
            const text = await generateResult.text();
            console.warn('NovelAI returned an error.', generateResult.statusText, text);
            return response.sendStatus(500);
        }

        const archiveBuffer = await generateResult.arrayBuffer();
        const imageBuffer = await extractFileFromZipBuffer(archiveBuffer, '.png');

        if (!imageBuffer) {
            console.error('NovelAI generated an image, but the PNG file was not found.');
            return response.sendStatus(500);
        }

        const originalBase64 = imageBuffer.toString('base64');

        // No upscaling
        if (isNaN(request.body.upscale_ratio) || request.body.upscale_ratio <= 1) {
            return response.send(originalBase64);
        }

        try {
            console.info('Upscaling image...');
            const upscaleUrl = `${API_NOVELAI}/ai/upscale`;
            const upscaleResult = await fetch(upscaleUrl, {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${key}`,
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    image: originalBase64,
                    height: request.body.height,
                    width: request.body.width,
                    scale: request.body.upscale_ratio,
                }),
            });

            if (!upscaleResult.ok) {
                const text = await upscaleResult.text();
                throw new Error('NovelAI returned an error.', { cause: text });
            }

            const upscaledArchiveBuffer = await upscaleResult.arrayBuffer();
            const upscaledImageBuffer = await extractFileFromZipBuffer(upscaledArchiveBuffer, '.png');

            if (!upscaledImageBuffer) {
                throw new Error('NovelAI upscaled an image, but the PNG file was not found.');
            }

            const upscaledBase64 = upscaledImageBuffer.toString('base64');

            return response.send(upscaledBase64);
        } catch (error) {
            console.warn('NovelAI generated an image, but upscaling failed. Returning original image.', error);
            return response.send(originalBase64);
        }
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

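/**
 * Voice generation endpoint. Proxies NovelAI's TTS API and returns the
 * resulting MP3 audio; the voice name is passed through the seed query
 * parameter while the voice parameter is fixed at -1.
 */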
router.post('/generate-voice', async (request, response) => {
    const token = readSecret(request.user.directories, SECRET_KEYS.NOVEL);

    if (!token) {
        console.error('NovelAI Access Token is missing.');
        return response.sendStatus(400);
    }

    const text = request.body.text;
    const voice = request.body.voice;

    if (!text || !voice) {
        return response.sendStatus(400);
    }

    try {
        const url = `${API_NOVELAI}/ai/generate-voice?text=${encodeURIComponent(text)}&voice=-1&seed=${encodeURIComponent(voice)}&opus=false&version=v2`;
        const result = await fetch(url, {
            method: 'GET',
            headers: {
                'Authorization': `Bearer ${token}`,
                'Accept': 'audio/mpeg',
            },
        });

        if (!result.ok) {
            const errorText = await result.text();
            console.error('NovelAI returned an error.', result.statusText, errorText);
            return response.sendStatus(500);
        }

        const chunks = await readAllChunks(result.body);
        const buffer = Buffer.concat(chunks.map(chunk => new Uint8Array(chunk)));
        response.setHeader('Content-Type', 'audio/mpeg');
        return response.send(buffer);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

799
web-app/src/endpoints/openai.js
Normal file
@@ -0,0 +1,799 @@
import fs from 'node:fs';
import { Buffer } from 'node:buffer';

import fetch from 'node-fetch';
import FormData from 'form-data';
import express from 'express';

import { getConfigValue, mergeObjectWithYaml, excludeKeysByYaml, trimV1, delay } from '../util.js';
import { setAdditionalHeaders } from '../additional-headers.js';
import { readSecret, SECRET_KEYS } from './secrets.js';
import { AIMLAPI_HEADERS, OPENROUTER_HEADERS, ZAI_ENDPOINT } from '../constants.js';

export const router = express.Router();

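/**
 * Multimodal captioning endpoint. Resolves the API key, request URL, and any
 * provider-specific quirks for the selected backend, then forwards a single
 * chat completion request containing the prompt and the image.
 */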
router.post('/caption-image', async (request, response) => {
    try {
        let key = '';
        let headers = {};
        let bodyParams = {};

        if (request.body.api === 'openai' && !request.body.reverse_proxy) {
            key = readSecret(request.user.directories, SECRET_KEYS.OPENAI);
        }

        if (request.body.api === 'xai' && !request.body.reverse_proxy) {
            key = readSecret(request.user.directories, SECRET_KEYS.XAI);
        }

        if (request.body.api === 'mistral' && !request.body.reverse_proxy) {
            key = readSecret(request.user.directories, SECRET_KEYS.MISTRALAI);
        }

        if (request.body.reverse_proxy && request.body.proxy_password) {
            key = request.body.proxy_password;
        }

        if (request.body.api === 'custom') {
            key = readSecret(request.user.directories, SECRET_KEYS.CUSTOM);
            mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
            mergeObjectWithYaml(headers, request.body.custom_include_headers);
        }

        if (request.body.api === 'openrouter') {
            key = readSecret(request.user.directories, SECRET_KEYS.OPENROUTER);
        }

        if (request.body.api === 'ooba') {
            key = readSecret(request.user.directories, SECRET_KEYS.OOBA);
            bodyParams.temperature = 0.1;
        }

        if (request.body.api === 'koboldcpp') {
            key = readSecret(request.user.directories, SECRET_KEYS.KOBOLDCPP);
        }

        if (request.body.api === 'llamacpp') {
            key = readSecret(request.user.directories, SECRET_KEYS.LLAMACPP);
        }

        if (request.body.api === 'vllm') {
            key = readSecret(request.user.directories, SECRET_KEYS.VLLM);
        }

        if (request.body.api === 'aimlapi') {
            key = readSecret(request.user.directories, SECRET_KEYS.AIMLAPI);
        }

        if (request.body.api === 'groq') {
            key = readSecret(request.user.directories, SECRET_KEYS.GROQ);
        }

        if (request.body.api === 'cohere') {
            key = readSecret(request.user.directories, SECRET_KEYS.COHERE);
        }

        if (request.body.api === 'moonshot') {
            key = readSecret(request.user.directories, SECRET_KEYS.MOONSHOT);
        }

        if (request.body.api === 'nanogpt') {
            key = readSecret(request.user.directories, SECRET_KEYS.NANOGPT);
        }

        if (request.body.api === 'chutes') {
            key = readSecret(request.user.directories, SECRET_KEYS.CHUTES);
        }

        if (request.body.api === 'electronhub') {
            key = readSecret(request.user.directories, SECRET_KEYS.ELECTRONHUB);
        }

        if (request.body.api === 'zai') {
            key = readSecret(request.user.directories, SECRET_KEYS.ZAI);
            bodyParams.max_tokens = 4096; // default is 1024
        }

        const noKeyTypes = ['custom', 'ooba', 'koboldcpp', 'vllm', 'llamacpp', 'pollinations'];
        if (!key && !request.body.reverse_proxy && !noKeyTypes.includes(request.body.api)) {
            console.warn('No key found for API', request.body.api);
            return response.sendStatus(400);
        }

        const body = {
            model: request.body.model,
            messages: [
                {
                    role: 'user',
                    content: [
                        { type: 'text', text: request.body.prompt },
                        { type: 'image_url', image_url: { 'url': request.body.image } },
                    ],
                },
            ],
            ...bodyParams,
        };

        const captionSystemPrompt = getConfigValue('openai.captionSystemPrompt');
        if (captionSystemPrompt) {
            body.messages.unshift({
                role: 'system',
                content: captionSystemPrompt,
            });
        }

        if (request.body.api === 'custom') {
            excludeKeysByYaml(body, request.body.custom_exclude_body);
        }

        let apiUrl = '';

        if (request.body.api === 'openrouter') {
            apiUrl = 'https://openrouter.ai/api/v1/chat/completions';
            Object.assign(headers, OPENROUTER_HEADERS);
        }

        if (request.body.api === 'openai') {
            apiUrl = 'https://api.openai.com/v1/chat/completions';
        }

        if (request.body.reverse_proxy) {
            apiUrl = `${request.body.reverse_proxy}/chat/completions`;
        }

        if (request.body.api === 'custom') {
            apiUrl = `${request.body.server_url}/chat/completions`;
        }

        if (request.body.api === 'aimlapi') {
            apiUrl = 'https://api.aimlapi.com/v1/chat/completions';
            Object.assign(headers, AIMLAPI_HEADERS);
        }

        if (request.body.api === 'groq') {
            apiUrl = 'https://api.groq.com/openai/v1/chat/completions';
            if (body.messages?.[0]?.role === 'system') {
                body.messages[0].role = 'user';
            }
        }

        if (request.body.api === 'mistral') {
            apiUrl = 'https://api.mistral.ai/v1/chat/completions';
        }

        if (request.body.api === 'cohere') {
            apiUrl = 'https://api.cohere.ai/v2/chat';
        }

        if (request.body.api === 'xai') {
            apiUrl = 'https://api.x.ai/v1/chat/completions';
        }

        if (request.body.api === 'pollinations') {
            headers = { Authorization: '' };
            apiUrl = 'https://text.pollinations.ai/openai/chat/completions';
        }

        if (request.body.api === 'moonshot') {
            apiUrl = 'https://api.moonshot.ai/v1/chat/completions';
        }

        if (request.body.api === 'nanogpt') {
            apiUrl = 'https://nano-gpt.com/api/v1/chat/completions';
        }

        if (request.body.api === 'chutes') {
            apiUrl = 'https://llm.chutes.ai/v1/chat/completions';
        }

        if (request.body.api === 'electronhub') {
            apiUrl = 'https://api.electronhub.ai/v1/chat/completions';
        }

        if (request.body.api === 'zai') {
            apiUrl = request.body.zai_endpoint === ZAI_ENDPOINT.CODING
                ? 'https://api.z.ai/api/coding/paas/v4/chat/completions'
                : 'https://api.z.ai/api/paas/v4/chat/completions';

            // Handle video inlining for Z.AI
            if (/data:video\/\w+;base64,/.test(request.body.image)) {
                const message = body.messages.find(msg => Array.isArray(msg.content));
                if (message) {
                    const imgContent = message.content.find(c => c.type === 'image_url');
                    if (imgContent) {
                        imgContent.type = 'video_url';
                        imgContent.video_url = imgContent.image_url;
                        delete imgContent.image_url;
                    }
                }
            }
        }

        if (['koboldcpp', 'vllm', 'llamacpp', 'ooba'].includes(request.body.api)) {
            apiUrl = `${trimV1(request.body.server_url)}/v1/chat/completions`;
        }

        if (request.body.api === 'ooba') {
            const imgMessage = body.messages.pop();
            body.messages.push({
                role: 'user',
                content: imgMessage?.content?.[0]?.text,
            });
            body.messages.push({
                role: 'user',
                content: [],
                image_url: imgMessage?.content?.[1]?.image_url?.url,
            });
        }

        setAdditionalHeaders(request, { headers }, apiUrl);
        console.debug('Multimodal captioning request', body);

        const result = await fetch(apiUrl, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
                ...headers,
            },
            body: JSON.stringify(body),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('Multimodal captioning request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        /** @type {any} */
        const data = await result.json();
        console.info('Multimodal captioning response', data);
        const caption = data?.choices?.[0]?.message?.content ?? data?.message?.content?.[0]?.text;

        if (!caption) {
            return response.status(500).send('No caption found');
        }

        return response.json({ caption });
    } catch (error) {
        console.error(error);
        response.status(500).send('Internal server error');
    }
});

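/**
 * OpenAI text-to-speech endpoint. Forwards the request to /v1/audio/speech
 * and returns the generated MP3 audio.
 */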
router.post('/generate-voice', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.OPENAI);

        if (!key) {
            console.warn('No OpenAI key found');
            return response.sendStatus(400);
        }

        const requestBody = {
            input: request.body.text,
            response_format: 'mp3',
            voice: request.body.voice ?? 'alloy',
            speed: request.body.speed ?? 1,
            model: request.body.model ?? 'tts-1',
        };

        if (request.body.instructions) {
            requestBody.instructions = request.body.instructions;
        }

        console.debug('OpenAI TTS request', requestBody);

        const result = await fetch('https://api.openai.com/v1/audio/speech', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
            },
            body: JSON.stringify(requestBody),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('OpenAI request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const buffer = await result.arrayBuffer();
        response.setHeader('Content-Type', 'audio/mpeg');
        return response.send(Buffer.from(buffer));
    } catch (error) {
        console.error('OpenAI TTS generation failed', error);
        response.status(500).send('Internal server error');
    }
});

// ElectronHub TTS proxy
router.post('/electronhub/generate-voice', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.ELECTRONHUB);

        if (!key) {
            console.warn('No ElectronHub key found');
            return response.sendStatus(400);
        }

        const requestBody = {
            input: request.body.input,
            voice: request.body.voice,
            speed: request.body.speed ?? 1,
            temperature: request.body.temperature ?? undefined,
            model: request.body.model || 'tts-1',
            response_format: 'mp3',
        };

        // Optional provider-specific params
        if (request.body.instructions) requestBody.instructions = request.body.instructions;
        if (request.body.speaker_transcript) requestBody.speaker_transcript = request.body.speaker_transcript;
        if (Number.isFinite(request.body.cfg_scale)) requestBody.cfg_scale = Number(request.body.cfg_scale);
        if (Number.isFinite(request.body.cfg_filter_top_k)) requestBody.cfg_filter_top_k = Number(request.body.cfg_filter_top_k);
        if (Number.isFinite(request.body.speech_rate)) requestBody.speech_rate = Number(request.body.speech_rate);
        if (Number.isFinite(request.body.pitch_adjustment)) requestBody.pitch_adjustment = Number(request.body.pitch_adjustment);
        if (request.body.emotional_style) requestBody.emotional_style = request.body.emotional_style;

        // Handle dynamic parameters sent from the frontend
        const knownParams = new Set(Object.keys(requestBody));
        for (const key in request.body) {
            if (!knownParams.has(key) && request.body[key] !== undefined) {
                requestBody[key] = request.body[key];
            }
        }

        // Clean out undefined values
        Object.keys(requestBody).forEach(k => requestBody[k] === undefined && delete requestBody[k]);

        console.debug('ElectronHub TTS request', requestBody);

        const result = await fetch('https://api.electronhub.ai/v1/audio/speech', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
            },
            body: JSON.stringify(requestBody),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('ElectronHub TTS request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const contentType = result.headers.get('content-type') || 'audio/mpeg';
        const buffer = await result.arrayBuffer();
        response.setHeader('Content-Type', contentType);
        return response.send(Buffer.from(buffer));
    } catch (error) {
        console.error('ElectronHub TTS generation failed', error);
        response.status(500).send('Internal server error');
    }
});

// ElectronHub model list
router.post('/electronhub/models', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.ELECTRONHUB);

        if (!key) {
            console.warn('No ElectronHub key found');
            return response.sendStatus(400);
        }

        const result = await fetch('https://api.electronhub.ai/v1/models', {
            method: 'GET',
            headers: {
                Authorization: `Bearer ${key}`,
            },
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('ElectronHub models request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        const models = data && Array.isArray(data['data']) ? data['data'] : [];
        return response.json(models);
    } catch (error) {
        console.error('ElectronHub models fetch failed', error);
        response.status(500).send('Internal server error');
    }
});

// Chutes TTS
router.post('/chutes/generate-voice', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.CHUTES);

        if (!key) {
            console.warn('No Chutes key found');
            return response.sendStatus(400);
        }

        const requestBody = {
            text: request.body.input,
            voice: request.body.voice || 'af_heart',
            speed: request.body.speed || 1,
        };

        console.debug('Chutes TTS request', requestBody);

        const result = await fetch('https://chutes-kokoro.chutes.ai/speak', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
            },
            body: JSON.stringify(requestBody),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('Chutes TTS request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const contentType = result.headers.get('content-type') || 'audio/mpeg';
        const buffer = await result.arrayBuffer();
        response.setHeader('Content-Type', contentType);
        return response.send(Buffer.from(buffer));
    } catch (error) {
        console.error('Chutes TTS generation failed', error);
        response.status(500).send('Internal server error');
    }
});

router.post('/chutes/models/embedding', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.CHUTES);

        if (!key) {
            console.warn('No Chutes key found');
            return response.sendStatus(400);
        }

        const result = await fetch('https://api.chutes.ai/chutes/?template=embedding&include_public=true&limit=999', {
            method: 'GET',
            headers: {
                Authorization: `Bearer ${key}`,
            },
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('Chutes embedding models request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        /** @type {any} */
        const data = await result.json();

        if (!Array.isArray(data?.items)) {
            console.warn('Chutes embedding models response invalid', data);
            return response.sendStatus(500);
        }
        return response.json(data.items);
    } catch (error) {
        console.error('Chutes embedding models fetch failed', error);
        response.sendStatus(500);
    }
});

router.post('/generate-image', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.OPENAI);

        if (!key) {
            console.warn('No OpenAI key found');
            return response.sendStatus(400);
        }

        console.debug('OpenAI request', request.body);

        const result = await fetch('https://api.openai.com/v1/images/generations', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${key}`,
            },
            body: JSON.stringify(request.body),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('OpenAI request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        return response.json(data);
    } catch (error) {
        console.error(error);
        response.status(500).send('Internal server error');
    }
});

router.post('/generate-video', async (request, response) => {
    try {
        const controller = new AbortController();
        request.socket.removeAllListeners('close');
        request.socket.on('close', function () {
            controller.abort();
        });

        const key = readSecret(request.user.directories, SECRET_KEYS.OPENAI);

        if (!key) {
            console.warn('No OpenAI key found');
            return response.sendStatus(400);
        }

        console.debug('OpenAI video generation request', request.body);

        const videoJobResponse = await fetch('https://api.openai.com/v1/videos', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${key}`,
            },
            body: JSON.stringify({
                prompt: request.body.prompt,
                model: request.body.model || 'sora-2',
                size: request.body.size || '720x1280',
                seconds: request.body.seconds || '8',
            }),
        });

        if (!videoJobResponse.ok) {
            const text = await videoJobResponse.text();
            console.warn('OpenAI video generation request failed', videoJobResponse.statusText, text);
            return response.status(500).send(text);
        }

        /** @type {any} */
        const videoJob = await videoJobResponse.json();

        if (!videoJob || !videoJob.id) {
            console.warn('OpenAI video generation returned no job ID', videoJob);
            return response.status(500).send('No video job ID returned');
        }

        // Poll for video generation completion
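        // Uses a linear backoff: each attempt waits 5 s plus 1 s per prior
        // attempt, so 30 attempts span roughly ten minutes in total.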
        for (let attempt = 0; attempt < 30; attempt++) {
            if (controller.signal.aborted) {
                console.info('OpenAI video generation aborted by client');
                return response.status(500).send('Video generation aborted by client');
            }

            await delay(5000 + attempt * 1000);
            console.debug(`Polling OpenAI video job ${videoJob.id}, attempt ${attempt + 1}`);

            const pollResponse = await fetch(`https://api.openai.com/v1/videos/${videoJob.id}`, {
                method: 'GET',
                headers: {
                    'Authorization': `Bearer ${key}`,
                },
            });

            if (!pollResponse.ok) {
                const text = await pollResponse.text();
                console.warn('OpenAI video job polling failed', pollResponse.statusText, text);
                return response.status(500).send(text);
            }

            /** @type {any} */
            const pollResult = await pollResponse.json();
            console.debug(`OpenAI video job status: ${pollResult.status}, progress: ${pollResult.progress}`);

            if (pollResult.status === 'failed') {
                console.warn('OpenAI video generation failed', pollResult);
                return response.status(500).send('Video generation failed');
            }

            if (pollResult.status === 'completed') {
                const contentResponse = await fetch(`https://api.openai.com/v1/videos/${videoJob.id}/content`, {
                    method: 'GET',
                    headers: {
                        'Authorization': `Bearer ${key}`,
                    },
                });

                if (!contentResponse.ok) {
                    const text = await contentResponse.text();
                    console.warn('OpenAI video content fetch failed', contentResponse.statusText, text);
                    return response.status(500).send(text);
                }

                const contentBuffer = await contentResponse.arrayBuffer();
                return response.send({ format: 'mp4', data: Buffer.from(contentBuffer).toString('base64') });
            }
        }

        // All polling attempts exhausted without completion; respond instead
        // of leaving the request hanging
        return response.status(504).send('Video generation timed out');
    } catch (error) {
        console.error('OpenAI video generation failed', error);
        response.status(500).send('Internal server error');
    }
});

const custom = express.Router();

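/**
 * Generic OpenAI-compatible TTS proxy. Unlike the route above, the provider
 * endpoint comes from the request, so any server that implements the
 * /audio/speech contract can be targeted; the API key is optional.
 */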
custom.post('/generate-voice', async (request, response) => {
|
||||
try {
|
||||
const key = readSecret(request.user.directories, SECRET_KEYS.CUSTOM_OPENAI_TTS);
|
||||
const { input, provider_endpoint, response_format, voice, speed, model } = request.body;
|
||||
|
||||
if (!provider_endpoint) {
|
||||
console.warn('No OpenAI-compatible TTS provider endpoint provided');
|
||||
return response.sendStatus(400);
|
||||
}
|
||||
|
||||
const result = await fetch(provider_endpoint, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${key ?? ''}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
input: input ?? '',
|
||||
response_format: response_format ?? 'mp3',
|
||||
voice: voice ?? 'alloy',
|
||||
speed: speed ?? 1,
|
||||
model: model ?? 'tts-1',
|
||||
}),
|
||||
});
|
||||
|
||||
if (!result.ok) {
|
||||
const text = await result.text();
|
||||
console.warn('OpenAI request failed', result.statusText, text);
|
||||
return response.status(500).send(text);
|
||||
}
|
||||
|
||||
const buffer = await result.arrayBuffer();
|
||||
response.setHeader('Content-Type', 'audio/mpeg');
|
||||
return response.send(Buffer.from(buffer));
|
||||
} catch (error) {
|
||||
console.error('OpenAI TTS generation failed', error);
|
||||
response.status(500).send('Internal server error');
|
||||
}
|
||||
});
|
||||
|
||||
router.use('/custom', custom);

/**
 * Creates a transcribe-audio endpoint handler for a given provider.
 * @param {object} config - Provider configuration
 * @param {string} config.secretKey - The SECRET_KEYS enum value for the provider
 * @param {string} config.apiUrl - The transcription API endpoint URL
 * @param {string} config.providerName - Display name for logging
 * @returns {import('express').RequestHandler} Express request handler
 */
function createTranscribeHandler({ secretKey, apiUrl, providerName }) {
    return async (request, response) => {
        try {
            const key = readSecret(request.user.directories, secretKey);

            if (!key) {
                console.warn(`No ${providerName} key found`);
                return response.sendStatus(400);
            }

            if (!request.file) {
                console.warn('No audio file found');
                return response.sendStatus(400);
            }

            console.info(`Processing audio file with ${providerName}`, request.file.path);
            const formData = new FormData();
            formData.append('file', fs.createReadStream(request.file.path), { filename: 'audio.wav', contentType: 'audio/wav' });
            formData.append('model', request.body.model);

            if (request.body.language) {
                formData.append('language', request.body.language);
            }

            const result = await fetch(apiUrl, {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${key}`,
                    ...formData.getHeaders(),
                },
                body: formData,
            });

            if (!result.ok) {
                const text = await result.text();
                console.warn(`${providerName} request failed`, result.statusText, text);
                // Clean up the uploaded temp file on failure as well
                fs.unlinkSync(request.file.path);
                return response.status(500).send(text);
            }

            fs.unlinkSync(request.file.path);
            const data = await result.json();
            console.debug(`${providerName} transcription response`, data);
            return response.json(data);
        } catch (error) {
            console.error(`${providerName} transcription failed`, error);
            response.status(500).send('Internal server error');
        }
    };
}

router.post('/transcribe-audio', createTranscribeHandler({
    secretKey: SECRET_KEYS.OPENAI,
    apiUrl: 'https://api.openai.com/v1/audio/transcriptions',
    providerName: 'OpenAI',
}));

router.post('/groq/transcribe-audio', createTranscribeHandler({
    secretKey: SECRET_KEYS.GROQ,
    apiUrl: 'https://api.groq.com/openai/v1/audio/transcriptions',
    providerName: 'Groq',
}));

router.post('/mistral/transcribe-audio', createTranscribeHandler({
    secretKey: SECRET_KEYS.MISTRALAI,
    apiUrl: 'https://api.mistral.ai/v1/audio/transcriptions',
    providerName: 'MistralAI',
}));

router.post('/zai/transcribe-audio', createTranscribeHandler({
    secretKey: SECRET_KEYS.ZAI,
    apiUrl: 'https://api.z.ai/api/paas/v4/audio/transcriptions',
    providerName: 'Z.AI',
}));

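// Chutes does not follow the OpenAI multipart transcription API: the model name is
// part of the hostname and the audio is posted as base64 JSON, so it gets a bespoke handler.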
router.post('/chutes/transcribe-audio', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.CHUTES);

        if (!key) {
            console.warn('No Chutes key found');
            return response.sendStatus(400);
        }

        if (!request.file) {
            console.warn('No audio file found');
            return response.sendStatus(400);
        }

        console.info('Processing audio file with Chutes', request.file.path);
        const audioBase64 = fs.readFileSync(request.file.path).toString('base64');

        const result = await fetch(`https://${request.body.model}.chutes.ai/transcribe`, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${key}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                audio_b64: audioBase64,
            }),
        });

        if (!result.ok) {
            const text = await result.text();
            console.warn('Chutes request failed', result.statusText, text);
            // Clean up the uploaded temp file on failure as well
            fs.unlinkSync(request.file.path);
            return response.status(500).send(text);
        }

        fs.unlinkSync(request.file.path);
        const data = await result.json();
        console.debug('Chutes transcription response', data);

        if (!Array.isArray(data)) {
            console.warn('Chutes transcription response invalid', data);
            return response.sendStatus(500);
        }

        const fullText = data.map(chunk => chunk.text || '').join('').trim();
        return response.json({ text: fullText });
    } catch (error) {
        console.error('Chutes transcription failed', error);
        response.status(500).send('Internal server error');
    }
});
172
web-app/src/endpoints/openrouter.js
Normal file
@@ -0,0 +1,172 @@
import express from 'express';
import fetch from 'node-fetch';
import mime from 'mime-types';
import { readSecret, SECRET_KEYS } from './secrets.js';

export const router = express.Router();
const API_OPENROUTER = 'https://openrouter.ai/api/v1';

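// Lists the provider names able to serve a given model, e.g. to populate a provider picker.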
router.post('/models/providers', async (req, res) => {
    try {
        const { model } = req.body;
        const response = await fetch(`${API_OPENROUTER}/models/${model}/endpoints`, {
            method: 'GET',
            headers: {
                'Accept': 'application/json',
            },
        });

        if (!response.ok) {
            return res.json([]);
        }

        /** @type {any} */
        const data = await response.json();
        const endpoints = data?.data?.endpoints || [];
        const providerNames = endpoints.map(e => e.provider_name);

        return res.json(providerNames);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

/**
 * Fetches and filters models from OpenRouter API based on modality criteria.
 * @param {string} endpoint - The API endpoint to fetch from
 * @param {string} inputModality - Required input modality
 * @param {string} outputModality - Required output modality
 * @param {boolean} [idsOnly=false] - Whether to return only model IDs
 * @returns {Promise<any[]>} Filtered models or model IDs
 */
async function fetchModelsByModality(endpoint, inputModality, outputModality, idsOnly = false) {
    const response = await fetch(`${API_OPENROUTER}${endpoint}`, {
        method: 'GET',
        headers: { 'Accept': 'application/json' },
    });

    if (!response.ok) {
        console.warn('OpenRouter API request failed', response.statusText);
        return [];
    }

    /** @type {any} */
    const data = await response.json();

    if (!Array.isArray(data?.data)) {
        console.warn('OpenRouter API response was not an array');
        return [];
    }

    const filtered = data.data
        .filter(m => Array.isArray(m?.architecture?.input_modalities))
        .filter(m => m.architecture.input_modalities.includes(inputModality))
        .filter(m => Array.isArray(m?.architecture?.output_modalities))
        .filter(m => m.architecture.output_modalities.includes(outputModality))
        .sort((a, b) => a?.id && b?.id ? a.id.localeCompare(b.id) : 0);

    return idsOnly ? filtered.map(m => m.id) : filtered;
}

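// The three model-list routes below are thin wrappers over fetchModelsByModality,
// differing only in the modality filter and the shape of the returned list.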
router.post('/models/multimodal', async (_req, res) => {
    try {
        const models = await fetchModelsByModality('/models', 'image', 'text', true);
        return res.json(models);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.post('/models/embedding', async (_req, res) => {
    try {
        const models = await fetchModelsByModality('/embeddings/models', 'text', 'embeddings');
        return res.json(models);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.post('/models/image', async (_req, res) => {
    try {
        const models = await fetchModelsByModality('/models', 'text', 'image');
        return res.json(models.map(m => ({ value: m.id, text: m.name || m.id })));
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

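// OpenRouter exposes image generation through the chat completions endpoint: the model
// responds with a data: URI under message.images rather than via a dedicated image API.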
router.post('/image/generate', async (req, res) => {
    try {
        const key = readSecret(req.user.directories, SECRET_KEYS.OPENROUTER);

        if (!key) {
            console.warn('OpenRouter API key not found');
            return res.status(400).json({ error: 'OpenRouter API key not found' });
        }

        console.debug('OpenRouter image generation request', req.body);

        const { model, prompt } = req.body;

        if (!model || !prompt) {
            return res.status(400).json({ error: 'Model and prompt are required' });
        }

        const response = await fetch(`${API_OPENROUTER}/chat/completions`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${key}`,
            },
            body: JSON.stringify({
                model: model,
                messages: [
                    {
                        role: 'user',
                        content: prompt,
                    },
                ],
                modalities: ['image', 'text'],
                image_config: {
                    aspect_ratio: req.body.aspect_ratio || '1:1',
                },
            }),
        });

        if (!response.ok) {
            console.warn('OpenRouter image generation failed', await response.text());
            return res.sendStatus(500);
        }

        /** @type {any} */
        const data = await response.json();

        const imageUrl = data?.choices?.[0]?.message?.images?.[0]?.image_url?.url;

        if (!imageUrl) {
            console.warn('No image URL found in OpenRouter response', data);
            return res.sendStatus(500);
        }

        // Split the data: URI into its MIME type and base64 payload
        const [mimeType, base64Data] = /^data:(.*);base64,(.*)$/.exec(imageUrl)?.slice(1) || [];

        if (!mimeType || !base64Data) {
            console.warn('Invalid image data format', imageUrl);
            return res.sendStatus(500);
        }

        const result = {
            format: mime.extension(mimeType) || 'png',
            image: base64Data,
        };

        return res.json(result);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});
103
web-app/src/endpoints/presets.js
Normal file
@@ -0,0 +1,103 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

import { getDefaultPresetFile, getDefaultPresets } from './content-manager.js';

/**
 * Gets the folder and extension for the preset settings based on the API source ID.
 * @param {string} apiId API source ID
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @returns {{folder: string?, extension: string?}} Object containing the folder and extension for the preset settings
 */
function getPresetSettingsByAPI(apiId, directories) {
    switch (apiId) {
        case 'kobold':
        case 'koboldhorde':
            return { folder: directories.koboldAI_Settings, extension: '.json' };
        case 'novel':
            return { folder: directories.novelAI_Settings, extension: '.json' };
        case 'textgenerationwebui':
            return { folder: directories.textGen_Settings, extension: '.json' };
        case 'openai':
            return { folder: directories.openAI_Settings, extension: '.json' };
        case 'instruct':
            return { folder: directories.instruct, extension: '.json' };
        case 'context':
            return { folder: directories.context, extension: '.json' };
        case 'sysprompt':
            return { folder: directories.sysprompt, extension: '.json' };
        case 'reasoning':
            return { folder: directories.reasoning, extension: '.json' };
        default:
            return { folder: null, extension: null };
    }
}

export const router = express.Router();

router.post('/save', function (request, response) {
    const name = sanitize(request.body.name);
    if (!request.body.preset || !name) {
        return response.sendStatus(400);
    }

    const settings = getPresetSettingsByAPI(request.body.apiId, request.user.directories);

    if (!settings.folder) {
        return response.sendStatus(400);
    }

    const filename = name + settings.extension;
    const fullpath = path.join(settings.folder, filename);
    writeFileAtomicSync(fullpath, JSON.stringify(request.body.preset, null, 4), 'utf-8');
    return response.send({ name });
});

router.post('/delete', function (request, response) {
    const name = sanitize(request.body.name);
    if (!name) {
        return response.sendStatus(400);
    }

    const settings = getPresetSettingsByAPI(request.body.apiId, request.user.directories);

    if (!settings.folder) {
        return response.sendStatus(400);
    }

    const filename = name + settings.extension;
    const fullpath = path.join(settings.folder, filename);

    if (fs.existsSync(fullpath)) {
        fs.unlinkSync(fullpath);
        return response.sendStatus(200);
    } else {
        return response.sendStatus(404);
    }
});

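// Restoring does not write anything to disk: it only reports whether a bundled default
// preset with this name exists and returns its contents for the client to re-apply.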
router.post('/restore', function (request, response) {
    try {
        const settings = getPresetSettingsByAPI(request.body.apiId, request.user.directories);
        const name = sanitize(request.body.name);
        const defaultPresets = getDefaultPresets(request.user.directories);

        const defaultPreset = defaultPresets.find(p => p.name === name && p.folder === settings.folder);

        const result = { isDefault: false, preset: {} };

        if (defaultPreset) {
            result.isDefault = true;
            result.preset = getDefaultPresetFile(defaultPreset.filename) || {};
        }

        return response.send(result);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
32
web-app/src/endpoints/quick-replies.js
Normal file
@@ -0,0 +1,32 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

export const router = express.Router();

router.post('/save', (request, response) => {
    if (!request.body || !request.body.name) {
        return response.sendStatus(400);
    }

    const filename = path.join(request.user.directories.quickreplies, sanitize(`${request.body.name}.json`));
    writeFileAtomicSync(filename, JSON.stringify(request.body, null, 4), 'utf8');

    return response.sendStatus(200);
});

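// Deletion is idempotent: a request for a file that does not exist still returns 200.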
router.post('/delete', (request, response) => {
    if (!request.body || !request.body.name) {
        return response.sendStatus(400);
    }

    const filename = path.join(request.user.directories.quickreplies, sanitize(`${request.body.name}.json`));
    if (fs.existsSync(filename)) {
        fs.unlinkSync(filename);
    }

    return response.sendStatus(200);
});
455
web-app/src/endpoints/search.js
Normal file
@@ -0,0 +1,455 @@
import fetch from 'node-fetch';
import express from 'express';

import { decode } from 'html-entities';
import { readSecret, SECRET_KEYS } from './secrets.js';
import { trimV1 } from '../util.js';
import { setAdditionalHeaders } from '../additional-headers.js';

export const router = express.Router();

// Cosplay as Chrome
const visitHeaders = {
    'Accept': 'text/html',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip, deflate, br',
    'Connection': 'keep-alive',
    'Cache-Control': 'no-cache',
    'Pragma': 'no-cache',
    'TE': 'trailers',
    'DNT': '1',
    'Sec-Fetch-Dest': 'document',
    'Sec-Fetch-Mode': 'navigate',
    'Sec-Fetch-Site': 'none',
    'Sec-Fetch-User': '?1',
};

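// The extractor below scrapes the YouTube watch page directly: the player response JSON
// is located by splitting the HTML on '"captions":' instead of parsing the whole document.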
/**
 * Extract the transcript of a YouTube video
 * @param {string} videoPageBody HTML of the video page
 * @param {string} lang Language code
 * @returns {Promise<string>} Transcript text
 */
async function extractTranscript(videoPageBody, lang) {
    const RE_XML_TRANSCRIPT = /<text start="([^"]*)" dur="([^"]*)">([^<]*)<\/text>/g;
    const splittedHTML = videoPageBody.split('"captions":');

    if (splittedHTML.length <= 1) {
        if (videoPageBody.includes('class="g-recaptcha"')) {
            throw new Error('Too many requests');
        }
        if (!videoPageBody.includes('"playabilityStatus":')) {
            throw new Error('Video is not available');
        }
        throw new Error('Transcript not available');
    }

    const captions = (() => {
        try {
            return JSON.parse(splittedHTML[1].split(',"videoDetails')[0].replace('\n', ''));
        } catch (e) {
            return undefined;
        }
    })()?.['playerCaptionsTracklistRenderer'];

    if (!captions) {
        throw new Error('Transcript disabled');
    }

    if (!('captionTracks' in captions)) {
        throw new Error('Transcript not available');
    }

    if (lang && !captions.captionTracks.some(track => track.languageCode === lang)) {
        throw new Error('Transcript not available in this language');
    }

    const transcriptURL = (lang ? captions.captionTracks.find(track => track.languageCode === lang) : captions.captionTracks[0]).baseUrl;
    const transcriptResponse = await fetch(transcriptURL, {
        headers: {
            ...(lang && { 'Accept-Language': lang }),
            'User-Agent': visitHeaders['User-Agent'],
        },
    });

    if (!transcriptResponse.ok) {
        throw new Error('Transcript request failed');
    }

    const transcriptBody = await transcriptResponse.text();
    const results = [...transcriptBody.matchAll(RE_XML_TRANSCRIPT)];
    const transcript = results.map((result) => ({
        text: result[3],
        duration: parseFloat(result[2]),
        offset: parseFloat(result[1]),
        lang: lang ?? captions.captionTracks[0].languageCode,
    }));
    // The text is double-encoded
    const transcriptText = transcript.map((line) => decode(decode(line.text))).join(' ');
    return transcriptText;
}

router.post('/serpapi', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.SERPAPI);

        if (!key) {
            console.error('No SerpApi key found');
            return response.sendStatus(400);
        }

        const { query } = request.body;
        const result = await fetch(`https://serpapi.com/search.json?q=${encodeURIComponent(query)}&api_key=${key}`);

        console.debug('SerpApi query', query);

        if (!result.ok) {
            const text = await result.text();
            console.error('SerpApi request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('SerpApi response', data);
        return response.json(data);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

/**
 * Get the transcript of a YouTube video
 * @copyright https://github.com/Kakulukian/youtube-transcript (MIT License)
 */
router.post('/transcript', async (request, response) => {
    try {
        const id = request.body.id;
        const lang = request.body.lang;
        const json = request.body.json;

        if (!id) {
            console.error('Id is required for /transcript');
            return response.sendStatus(400);
        }

        const videoPageResponse = await fetch(`https://www.youtube.com/watch?v=${id}`, {
            headers: {
                ...(lang && { 'Accept-Language': lang }),
                'User-Agent': visitHeaders['User-Agent'],
            },
        });

        const videoPageBody = await videoPageResponse.text();

        try {
            const transcriptText = await extractTranscript(videoPageBody, lang);
            return json
                ? response.json({ transcript: transcriptText, html: videoPageBody })
                : response.send(transcriptText);
        } catch (error) {
            if (json) {
                return response.json({ html: videoPageBody, transcript: '' });
            }
            throw error;
        }
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/searxng', async (request, response) => {
    try {
        const { baseUrl, query, preferences, categories } = request.body;

        if (!baseUrl || !query) {
            console.error('Missing required parameters for /searxng');
            return response.sendStatus(400);
        }

        console.debug('SearXNG query', baseUrl, query);

        // Fetch the instance's main page (and its client CSS below) first, to mimic
        // a normal browser session before issuing the actual search request.
        const mainPageUrl = new URL(baseUrl);
        const mainPageRequest = await fetch(mainPageUrl, { headers: visitHeaders });

        if (!mainPageRequest.ok) {
            console.error('SearXNG request failed', mainPageRequest.statusText);
            return response.sendStatus(500);
        }

        const mainPageText = await mainPageRequest.text();
        const clientHref = mainPageText.match(/href="(\/client.+\.css)"/)?.[1];

        if (clientHref) {
            const clientUrl = new URL(clientHref, baseUrl);
            await fetch(clientUrl, { headers: visitHeaders });
        }

        const searchUrl = new URL('/search', baseUrl);
        const searchParams = new URLSearchParams();
        searchParams.append('q', query);
        if (preferences) {
            searchParams.append('preferences', preferences);
        }
        if (categories) {
            searchParams.append('categories', categories);
        }
        searchUrl.search = searchParams.toString();

        const searchResult = await fetch(searchUrl, { headers: visitHeaders });

        if (!searchResult.ok) {
            const text = await searchResult.text();
            console.error('SearXNG request failed', searchResult.statusText, text);
            return response.sendStatus(500);
        }

        const data = await searchResult.text();
        return response.send(data);
    } catch (error) {
        console.error('SearXNG request failed', error);
        return response.sendStatus(500);
    }
});

router.post('/tavily', async (request, response) => {
    try {
        const apiKey = readSecret(request.user.directories, SECRET_KEYS.TAVILY);

        if (!apiKey) {
            console.error('No Tavily key found');
            return response.sendStatus(400);
        }

        const { query, include_images } = request.body;

        const body = {
            query: query,
            api_key: apiKey,
            search_depth: 'basic',
            topic: 'general',
            include_answer: true,
            include_raw_content: false,
            include_images: !!include_images,
            include_image_descriptions: false,
            include_domains: [],
            max_results: 10,
        };

        const result = await fetch('https://api.tavily.com/search', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(body),
        });

        console.debug('Tavily query', query);

        if (!result.ok) {
            const text = await result.text();
            console.error('Tavily request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('Tavily response', data);
        return response.json(data);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/koboldcpp', async (request, response) => {
    try {
        const { query, url } = request.body;

        if (!url) {
            console.error('No URL provided for KoboldCpp search');
            return response.sendStatus(400);
        }

        console.debug('KoboldCpp search query', query);

        const baseUrl = trimV1(url);
        const args = {
            method: 'POST',
            headers: {},
            body: JSON.stringify({ q: query }),
        };

        setAdditionalHeaders(request, args, baseUrl);
        const result = await fetch(`${baseUrl}/api/extra/websearch`, args);

        if (!result.ok) {
            const text = await result.text();
            console.error('KoboldCpp request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('KoboldCpp search response', data);
        return response.json(data);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/serper', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.SERPER);

        if (!key) {
            console.error('No Serper key found');
            return response.sendStatus(400);
        }

        const { query, images } = request.body;

        const url = images
            ? 'https://google.serper.dev/images'
            : 'https://google.serper.dev/search';

        const result = await fetch(url, {
            method: 'POST',
            headers: {
                'X-API-KEY': key,
                'Content-Type': 'application/json',
            },
            redirect: 'follow',
            body: JSON.stringify({ q: query }),
        });

        console.debug('Serper query', query);

        if (!result.ok) {
            const text = await result.text();
            console.warn('Serper request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('Serper response', data);
        return response.json(data);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/zai', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.ZAI);

        if (!key) {
            console.error('No Z.AI key found');
            return response.sendStatus(400);
        }

        const { query } = request.body;

        if (!query) {
            console.error('No query provided for /zai');
            return response.sendStatus(400);
        }

        console.debug('Z.AI web search query', query);

        const result = await fetch('https://api.z.ai/api/paas/v4/web_search', {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${key}`,
            },
            body: JSON.stringify({
                // TODO: There's only one engine option for now
                search_engine: 'search-prime',
                search_query: query,
            }),
        });

        if (!result.ok) {
            const text = await result.text();
            console.error('Z.AI request failed', result.statusText, text);
            return response.status(500).send(text);
        }

        const data = await result.json();
        console.debug('Z.AI web search response', data);
        return response.json(data);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/visit', async (request, response) => {
    try {
        const url = request.body.url;
        const html = Boolean(request.body.html ?? true);

        if (!url) {
            console.error('No url provided for /visit');
            return response.sendStatus(400);
        }

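        // Basic SSRF guard: only plain http(s) URLs on default ports with a DNS
        // hostname are accepted; raw IPv4 literals are rejected outright.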
        try {
            const urlObj = new URL(url);

            // Reject relative or malformed URLs (URL properties are empty strings, never null)
            if (!urlObj.protocol || !urlObj.host) {
                throw new Error('Invalid URL format');
            }

            // Reject non-HTTP URLs
            if (urlObj.protocol !== 'http:' && urlObj.protocol !== 'https:') {
                throw new Error('Invalid protocol');
            }

            // Reject URLs with a non-standard port
            if (urlObj.port !== '') {
                throw new Error('Invalid port');
            }

            // Reject IP addresses
            if (urlObj.hostname.match(/^\d+\.\d+\.\d+\.\d+$/)) {
                throw new Error('Invalid hostname');
            }
        } catch (error) {
            console.error('Invalid url provided for /visit', url);
            return response.sendStatus(400);
        }

        console.info('Visiting web URL', url);

        const result = await fetch(url, { headers: visitHeaders });

        if (!result.ok) {
            console.error(`Visit failed ${result.status} ${result.statusText}`);
            return response.sendStatus(500);
        }

        const contentType = String(result.headers.get('content-type'));

        if (html) {
            if (!contentType.includes('text/html')) {
                console.error(`Visit failed, content-type is ${contentType}, expected text/html`);
                return response.sendStatus(500);
            }

            const text = await result.text();
            return response.send(text);
        }

        response.setHeader('Content-Type', contentType);
        const buffer = await result.arrayBuffer();
        return response.send(Buffer.from(buffer));
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
635
web-app/src/endpoints/secrets.js
Normal file
@@ -0,0 +1,635 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { color, getConfigValue, uuidv4 } from '../util.js';

export const SECRETS_FILE = 'secrets.json';
export const SECRET_KEYS = {
    _MIGRATED: '_migrated',
    HORDE: 'api_key_horde',
    MANCER: 'api_key_mancer',
    VLLM: 'api_key_vllm',
    APHRODITE: 'api_key_aphrodite',
    TABBY: 'api_key_tabby',
    OPENAI: 'api_key_openai',
    NOVEL: 'api_key_novel',
    CLAUDE: 'api_key_claude',
    DEEPL: 'deepl',
    LIBRE: 'libre',
    LIBRE_URL: 'libre_url',
    LINGVA_URL: 'lingva_url',
    OPENROUTER: 'api_key_openrouter',
    AI21: 'api_key_ai21',
    ONERING_URL: 'oneringtranslator_url',
    DEEPLX_URL: 'deeplx_url',
    MAKERSUITE: 'api_key_makersuite',
    VERTEXAI: 'api_key_vertexai',
    SERPAPI: 'api_key_serpapi',
    TOGETHERAI: 'api_key_togetherai',
    MISTRALAI: 'api_key_mistralai',
    CUSTOM: 'api_key_custom',
    OOBA: 'api_key_ooba',
    INFERMATICAI: 'api_key_infermaticai',
    DREAMGEN: 'api_key_dreamgen',
    NOMICAI: 'api_key_nomicai',
    KOBOLDCPP: 'api_key_koboldcpp',
    LLAMACPP: 'api_key_llamacpp',
    COHERE: 'api_key_cohere',
    PERPLEXITY: 'api_key_perplexity',
    GROQ: 'api_key_groq',
    AZURE_TTS: 'api_key_azure_tts',
    FEATHERLESS: 'api_key_featherless',
    HUGGINGFACE: 'api_key_huggingface',
    STABILITY: 'api_key_stability',
    CUSTOM_OPENAI_TTS: 'api_key_custom_openai_tts',
    TAVILY: 'api_key_tavily',
    CHUTES: 'api_key_chutes',
    ELECTRONHUB: 'api_key_electronhub',
    NANOGPT: 'api_key_nanogpt',
    BFL: 'api_key_bfl',
    COMFY_RUNPOD: 'api_key_comfy_runpod',
    FALAI: 'api_key_falai',
    GENERIC: 'api_key_generic',
    DEEPSEEK: 'api_key_deepseek',
    SERPER: 'api_key_serper',
    AIMLAPI: 'api_key_aimlapi',
    XAI: 'api_key_xai',
    FIREWORKS: 'api_key_fireworks',
    VERTEXAI_SERVICE_ACCOUNT: 'vertexai_service_account_json',
    MINIMAX: 'api_key_minimax',
    MINIMAX_GROUP_ID: 'minimax_group_id',
    MOONSHOT: 'api_key_moonshot',
    COMETAPI: 'api_key_cometapi',
    AZURE_OPENAI: 'api_key_azure_openai',
    ZAI: 'api_key_zai',
    SILICONFLOW: 'api_key_siliconflow',
    ELEVENLABS: 'api_key_elevenlabs',
};

/**
 * @typedef {object} SecretValue
 * @property {string} id The unique identifier for the secret
 * @property {string} value The secret value
 * @property {string} label The label for the secret
 * @property {boolean} active Whether the secret is currently active
 */

/**
 * @typedef {object} SecretState
 * @property {string} id The unique identifier for the secret
 * @property {string} value The secret value, masked for security
 * @property {string} label The label for the secret
 * @property {boolean} active Whether the secret is currently active
 */

/**
 * @typedef {Record<string, SecretState[]|null>} SecretStateMap
 */

/**
 * @typedef {{[key: string]: SecretValue[]}} SecretKeys
 * @typedef {{[key: string]: string}} FlatSecretKeys
 */

// These are the keys that are safe to expose, even if allowKeysExposure is false
const EXPORTABLE_KEYS = [
    SECRET_KEYS.LIBRE_URL,
    SECRET_KEYS.LINGVA_URL,
    SECRET_KEYS.ONERING_URL,
    SECRET_KEYS.DEEPLX_URL,
];

const allowKeysExposure = !!getConfigValue('allowKeysExposure', false, 'boolean');

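// Secrets are stored per user in <user root>/secrets.json, keyed by SECRET_KEYS,
// with each key holding an array of { id, value, label, active } entries.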
/**
 * SecretManager class to handle all secret operations
 */
export class SecretManager {
    /**
     * @param {import('../users.js').UserDirectoryList} directories
     */
    constructor(directories) {
        this.directories = directories;
        this.filePath = path.join(directories.root, SECRETS_FILE);
        this.defaultSecrets = {};
    }

    /**
     * Ensures the secrets file exists, creating an empty one if necessary
     * @private
     */
    _ensureSecretsFile() {
        if (!fs.existsSync(this.filePath)) {
            writeFileAtomicSync(this.filePath, JSON.stringify(this.defaultSecrets), 'utf-8');
        }
    }

    /**
     * Reads and parses the secrets file
     * @private
     * @returns {SecretKeys}
     */
    _readSecretsFile() {
        this._ensureSecretsFile();
        const fileContents = fs.readFileSync(this.filePath, 'utf-8');
        return /** @type {SecretKeys} */ (JSON.parse(fileContents));
    }

    /**
     * Writes secrets to the file atomically
     * @private
     * @param {SecretKeys} secrets
     */
    _writeSecretsFile(secrets) {
        writeFileAtomicSync(this.filePath, JSON.stringify(secrets, null, 4), 'utf-8');
    }

    /**
     * Deactivates all secrets for a given key
     * @private
     * @param {SecretValue[]} secretArray
     */
    _deactivateAllSecrets(secretArray) {
        secretArray.forEach(secret => {
            secret.active = false;
        });
    }

    /**
     * Validates that the secret key exists and has valid structure
     * @private
     * @param {SecretKeys} secrets
     * @param {string} key
     * @returns {boolean}
     */
    _validateSecretKey(secrets, key) {
        return Object.hasOwn(secrets, key) && Array.isArray(secrets[key]);
    }

    /**
     * Masks a secret value with asterisks in the middle
     * @param {string} value The secret value to mask
     * @param {string} key The secret key
     * @returns {string} A masked version of the value for peeking
     */
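    // Values longer than 10 characters render as 7 asterisks plus the last 3 characters
    // (e.g. '*******xyz'); anything shorter becomes a fixed run of 10 asterisks.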
    getMaskedValue(value, key) {
        // No masking if exposure is allowed
        if (allowKeysExposure || EXPORTABLE_KEYS.includes(key)) {
            return value;
        }
        const threshold = 10;
        const exposedChars = 3;
        const placeholder = '*';
        if (value.length <= threshold) {
            return placeholder.repeat(threshold);
        }
        const visibleEnd = value.slice(-exposedChars);
        const maskedMiddle = placeholder.repeat(threshold - exposedChars);
        return `${maskedMiddle}${visibleEnd}`;
    }

    /**
     * Writes a secret to the secrets file
     * @param {string} key Secret key
     * @param {string} value Secret value
     * @param {string} label Label for the secret
     * @returns {string} The ID of the newly created secret
     */
    writeSecret(key, value, label = 'Unlabeled') {
        const secrets = this._readSecretsFile();

        if (!Array.isArray(secrets[key])) {
            secrets[key] = [];
        }

        this._deactivateAllSecrets(secrets[key]);

        const secret = {
            id: uuidv4(),
            value: value,
            label: label,
            active: true,
        };
        secrets[key].push(secret);

        this._writeSecretsFile(secrets);
        return secret.id;
    }

    /**
     * Deletes a secret from the secrets file by its ID
     * @param {string} key Secret key
     * @param {string?} id Secret ID to delete
     */
    deleteSecret(key, id) {
        if (!fs.existsSync(this.filePath)) {
            return;
        }

        const secrets = this._readSecretsFile();

        if (!this._validateSecretKey(secrets, key)) {
            return;
        }

        const secretArray = secrets[key];
        const targetIndex = secretArray.findIndex(s => id ? s.id === id : s.active);

        // Delete the secret if found
        if (targetIndex !== -1) {
            secretArray.splice(targetIndex, 1);
        }

        // Reactivate the first secret if none are active
        if (secretArray.length && !secretArray.some(s => s.active)) {
            secretArray[0].active = true;
        }

        // Remove the key if no secrets left
        if (secretArray.length === 0) {
            delete secrets[key];
        }

        this._writeSecretsFile(secrets);
    }

    /**
     * Reads the active secret value for a given key
     * @param {string} key Secret key
     * @param {string?} id ID of the secret to read (optional)
     * @returns {string} Secret value or empty string if not found
     */
    readSecret(key, id) {
        if (!fs.existsSync(this.filePath)) {
            return '';
        }

        const secrets = this._readSecretsFile();
        const secretArray = secrets[key];

        if (Array.isArray(secretArray) && secretArray.length > 0) {
            const activeSecret = secretArray.find(s => id ? s.id === id : s.active);
            return activeSecret?.value || '';
        }

        return '';
    }

    /**
     * Activates a specific secret by ID for a given key
     * @param {string} key Secret key to rotate
     * @param {string} id ID of the secret to activate
     */
    rotateSecret(key, id) {
        if (!fs.existsSync(this.filePath)) {
            return;
        }

        const secrets = this._readSecretsFile();

        if (!this._validateSecretKey(secrets, key)) {
            return;
        }

        const secretArray = secrets[key];
        const targetIndex = secretArray.findIndex(s => s.id === id);

        if (targetIndex === -1) {
            console.warn(`Secret with ID ${id} not found for key ${key}`);
            return;
        }

        this._deactivateAllSecrets(secretArray);
        secretArray[targetIndex].active = true;

        this._writeSecretsFile(secrets);
    }

    /**
     * Renames a secret by its ID
     * @param {string} key Secret key to rename
     * @param {string} id ID of the secret to rename
     * @param {string} label New label for the secret
     */
    renameSecret(key, id, label) {
        const secrets = this._readSecretsFile();

        if (!this._validateSecretKey(secrets, key)) {
            return;
        }

        const secretArray = secrets[key];
        const targetIndex = secretArray.findIndex(s => s.id === id);

        if (targetIndex === -1) {
            console.warn(`Secret with ID ${id} not found for key ${key}`);
            return;
        }

        secretArray[targetIndex].label = label;
        this._writeSecretsFile(secrets);
    }

    /**
     * Gets the state of all secrets (whether they exist or not)
     * @returns {SecretStateMap} Secret state
     */
    getSecretState() {
        const secrets = this._readSecretsFile();
        /** @type {SecretStateMap} */
        const state = {};

        for (const key of Object.values(SECRET_KEYS)) {
            // Skip migration marker
            if (key === SECRET_KEYS._MIGRATED) {
                continue;
            }
            const value = secrets[key];
            if (value && Array.isArray(value) && value.length > 0) {
                state[key] = value.map(secret => ({
                    id: secret.id,
                    value: this.getMaskedValue(secret.value, key),
                    label: secret.label,
                    active: secret.active,
                }));
            } else {
                // No secrets for this key
                state[key] = null;
            }
        }

        return state;
    }

    /**
     * Gets all secrets (for admin viewing)
     * @returns {SecretKeys} All secrets
     */
    getAllSecrets() {
        return this._readSecretsFile();
    }

    /**
     * Migrates legacy flat secrets format to new format
     */
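    // Old format: { "api_key_openai": "sk-..." }; the new format wraps each value in an
    // array of { id, value, label, active } entries and adds a '_migrated' marker.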
    migrateFlatSecrets() {
        if (!fs.existsSync(this.filePath)) {
            return;
        }

        const fileContents = fs.readFileSync(this.filePath, 'utf8');
        const secrets = /** @type {FlatSecretKeys} */ (JSON.parse(fileContents));
        const values = Object.values(secrets);

        // Check if already migrated
        if (secrets[SECRET_KEYS._MIGRATED] || values.length === 0 || values.some(v => Array.isArray(v))) {
            return;
        }

        /** @type {SecretKeys} */
        const migratedSecrets = {};

        for (const [key, value] of Object.entries(secrets)) {
            if (typeof value === 'string' && value.trim()) {
                migratedSecrets[key] = [{
                    id: uuidv4(),
                    value: value,
                    label: key,
                    active: true,
                }];
            }
        }

        // Mark as migrated
        migratedSecrets[SECRET_KEYS._MIGRATED] = [];

        // Save backup of the old secrets file
        const backupFilePath = path.join(this.directories.backups, `secrets_migration_${Date.now()}.json`);
        fs.cpSync(this.filePath, backupFilePath);

        this._writeSecretsFile(migratedSecrets);
        console.info(color.green('Secrets migrated successfully, old secrets backed up to:'), backupFilePath);
    }
}

//#region Backwards compatibility
/**
 * Writes a secret to the secrets file
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {string} key Secret key
 * @param {string} value Secret value
 */
export function writeSecret(directories, key, value) {
    return new SecretManager(directories).writeSecret(key, value);
}

/**
 * Deletes a secret from the secrets file
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {string} key Secret key
 */
export function deleteSecret(directories, key) {
    return new SecretManager(directories).deleteSecret(key, null);
}

/**
 * Reads a secret from the secrets file
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {string} key Secret key
 * @returns {string} Secret value
 */
export function readSecret(directories, key) {
    return new SecretManager(directories).readSecret(key, null);
}

/**
 * Reads the secret state from the secrets file
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @returns {Record<string, boolean>} Secret state
 */
export function readSecretState(directories) {
    const state = new SecretManager(directories).getSecretState();
    const result = /** @type {Record<string, boolean>} */ ({});
    for (const key of Object.values(SECRET_KEYS)) {
        // Skip migration marker
        if (key === SECRET_KEYS._MIGRATED) {
            continue;
        }
        result[key] = Array.isArray(state[key]) && state[key].length > 0;
    }
    return result;
}

/**
 * Reads all secrets from the secrets file
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @returns {Record<string, string>} Secrets
 */
export function getAllSecrets(directories) {
    const secrets = new SecretManager(directories).getAllSecrets();
    const result = /** @type {Record<string, string>} */ ({});
    for (const [key, values] of Object.entries(secrets)) {
        // Skip migration marker
        if (key === SECRET_KEYS._MIGRATED) {
            continue;
        }
        if (Array.isArray(values) && values.length > 0) {
            const activeSecret = values.find(secret => secret.active);
            if (activeSecret) {
                result[key] = activeSecret.value;
            }
        }
    }
    return result;
}
//#endregion

/**
 * Migrates legacy flat secrets format to the new format for all user directories
 * @param {import('../users.js').UserDirectoryList[]} directoriesList User directories
 */
export function migrateFlatSecrets(directoriesList) {
    for (const directories of directoriesList) {
        try {
            const manager = new SecretManager(directories);
            manager.migrateFlatSecrets();
        } catch (error) {
            console.warn(color.red(`Failed to migrate secrets for ${directories.root}:`), error);
        }
    }
}

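// Secret management routes: /write, /read (masked state), /view and /find (gated by
// allowKeysExposure, except for the exportable URL keys), /delete, /rotate, /rename.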
export const router = express.Router();

router.post('/write', (request, response) => {
    try {
        const { key, value, label } = request.body;

        if (!key || typeof value !== 'string') {
            return response.status(400).send('Invalid key or value');
        }

        const manager = new SecretManager(request.user.directories);
        const id = manager.writeSecret(key, value, label);

        return response.send({ id });
    } catch (error) {
        console.error('Error writing secret:', error);
        return response.sendStatus(500);
    }
});

router.post('/read', (request, response) => {
    try {
        const manager = new SecretManager(request.user.directories);
        const state = manager.getSecretState();
        return response.send(state);
    } catch (error) {
        console.error('Error reading secret state:', error);
        return response.send({});
    }
});

router.post('/view', (request, response) => {
    try {
        if (!allowKeysExposure) {
            console.error('secrets.json cannot be viewed unless allowKeysExposure in config.yaml is set to true');
            return response.sendStatus(403);
        }

        const secrets = getAllSecrets(request.user.directories);

        if (!secrets) {
            return response.sendStatus(404);
        }

        return response.send(secrets);
    } catch (error) {
        console.error('Error viewing secrets:', error);
        return response.sendStatus(500);
    }
});

router.post('/find', (request, response) => {
    try {
        const { key, id } = request.body;

        if (!key) {
            return response.status(400).send('Key is required');
        }

        if (!allowKeysExposure && !EXPORTABLE_KEYS.includes(key)) {
            console.error('Cannot fetch secrets unless allowKeysExposure in config.yaml is set to true');
            return response.sendStatus(403);
        }

        const manager = new SecretManager(request.user.directories);
        const state = manager.getSecretState();

        if (!state[key]) {
            return response.sendStatus(404);
        }

        const secretValue = manager.readSecret(key, id);
        return response.send({ value: secretValue });
    } catch (error) {
        console.error('Error finding secret:', error);
        return response.sendStatus(500);
    }
});

router.post('/delete', (request, response) => {
    try {
        const { key, id } = request.body;

        if (!key) {
            return response.status(400).send('Key is required');
        }

        const manager = new SecretManager(request.user.directories);
        manager.deleteSecret(key, id);

        return response.sendStatus(204);
    } catch (error) {
        console.error('Error deleting secret:', error);
        return response.sendStatus(500);
    }
});

router.post('/rotate', (request, response) => {
    try {
        const { key, id } = request.body;

        if (!key || !id) {
            return response.status(400).send('Key and ID are required');
        }

        const manager = new SecretManager(request.user.directories);
        manager.rotateSecret(key, id);

        return response.sendStatus(204);
    } catch (error) {
        console.error('Error rotating secret:', error);
        return response.sendStatus(500);
    }
});

router.post('/rename', (request, response) => {
    try {
        const { key, id, label } = request.body;

        if (!key || !id || !label) {
            return response.status(400).send('Key, ID, and label are required');
        }

        const manager = new SecretManager(request.user.directories);
        manager.renameSecret(key, id, label);

        return response.sendStatus(204);
    } catch (error) {
        console.error('Error renaming secret:', error);
        return response.sendStatus(500);
    }
});
371
web-app/src/endpoints/settings.js
Normal file
@@ -0,0 +1,371 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import _ from 'lodash';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

import { SETTINGS_FILE } from '../constants.js';
import { getConfigValue, generateTimestamp, removeOldBackups } from '../util.js';
import { getAllUserHandles, getUserDirectories } from '../users.js';
import { getFileNameValidationFunction } from '../middleware/validateFileName.js';

const ENABLE_EXTENSIONS = !!getConfigValue('extensions.enabled', true, 'boolean');
const ENABLE_EXTENSIONS_AUTO_UPDATE = !!getConfigValue('extensions.autoUpdate', true, 'boolean');
const ENABLE_ACCOUNTS = !!getConfigValue('enableUserAccounts', false, 'boolean');

// 10 minutes
const AUTOSAVE_INTERVAL = 10 * 60 * 1000;

/**
 * Map of functions to trigger settings autosave for a user.
 * @type {Map<string, function>}
 */
const AUTOSAVE_FUNCTIONS = new Map();

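// _.throttle ensures at most one backup per user within each AUTOSAVE_INTERVAL window,
// no matter how often the save endpoint fires.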
/**
 * Triggers autosave for a user every 10 minutes.
 * @param {string} handle User handle
 * @returns {void}
 */
function triggerAutoSave(handle) {
    if (!AUTOSAVE_FUNCTIONS.has(handle)) {
        const throttledAutoSave = _.throttle(() => backupUserSettings(handle, true), AUTOSAVE_INTERVAL);
        AUTOSAVE_FUNCTIONS.set(handle, throttledAutoSave);
    }

    const functionToCall = AUTOSAVE_FUNCTIONS.get(handle);
    if (functionToCall && typeof functionToCall === 'function') {
        functionToCall();
    }
}

/**
 * Reads and parses files from a directory.
 * @param {string} directoryPath Path to the directory
 * @param {string} fileExtension File extension
 * @returns {Array} Parsed files
 */
function readAndParseFromDirectory(directoryPath, fileExtension = '.json') {
    const files = fs
        .readdirSync(directoryPath)
        .filter(x => path.parse(x).ext == fileExtension)
        .sort();

    const parsedFiles = [];

    files.forEach(item => {
        try {
            const file = fs.readFileSync(path.join(directoryPath, item), 'utf-8');
            parsedFiles.push(fileExtension == '.json' ? JSON.parse(file) : file);
        }
        catch {
            // skip
        }
    });

    return parsedFiles;
}

/**
 * Gets a sort function for sorting strings.
 * @param {*} _
 * @returns {(a: string, b: string) => number} Sort function
 */
function sortByName(_) {
    return (a, b) => a.localeCompare(b);
}

/**
 * Gets backup file prefix for user settings.
 * @param {string} handle User handle
 * @returns {string} File prefix
 */
export function getSettingsBackupFilePrefix(handle) {
    return `settings_${handle}_`;
}

function readPresetsFromDirectory(directoryPath, options = {}) {
    const {
        sortFunction,
        removeFileExtension = false,
        fileExtension = '.json',
    } = options;

    const files = fs.readdirSync(directoryPath).sort(sortFunction).filter(x => path.parse(x).ext == fileExtension);
    const fileContents = [];
    const fileNames = [];

    files.forEach(item => {
        try {
            const file = fs.readFileSync(path.join(directoryPath, item), 'utf8');
            // Parse only to validate; the raw string is what gets collected
            JSON.parse(file);
            fileContents.push(file);
            fileNames.push(removeFileExtension ? item.replace(/\.[^/.]+$/, '') : item);
        } catch {
            // Not valid JSON; skip this file
            console.warn(`${item} is not valid JSON`);
        }
    });

    return { fileContents, fileNames };
}

async function backupSettings() {
|
||||
try {
|
||||
const userHandles = await getAllUserHandles();
|
||||
|
||||
for (const handle of userHandles) {
|
||||
backupUserSettings(handle, true);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('Could not backup settings file', err);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a backup of the user's settings file.
|
||||
* @param {string} handle User handle
|
||||
* @param {boolean} preventDuplicates Prevent duplicate backups
|
||||
* @returns {void}
|
||||
*/
|
||||
function backupUserSettings(handle, preventDuplicates) {
|
||||
const userDirectories = getUserDirectories(handle);
|
||||
|
||||
if (!fs.existsSync(userDirectories.root)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const backupFile = path.join(userDirectories.backups, `${getSettingsBackupFilePrefix(handle)}${generateTimestamp()}.json`);
|
||||
const sourceFile = path.join(userDirectories.root, SETTINGS_FILE);
|
||||
|
||||
if (preventDuplicates && isDuplicateBackup(handle, sourceFile)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(sourceFile)) {
|
||||
return;
|
||||
}
|
||||
|
||||
fs.copyFileSync(sourceFile, backupFile);
|
||||
removeOldBackups(userDirectories.backups, `settings_${handle}`);
|
||||
}
|
||||
|
||||
/**
 * Checks if the backup would be a duplicate.
 * @param {string} handle User handle
 * @param {string} sourceFile Source file path
 * @returns {boolean} True if the backup is a duplicate
 */
function isDuplicateBackup(handle, sourceFile) {
    const latestBackup = getLatestBackup(handle);
    if (!latestBackup) {
        return false;
    }
    return areFilesEqual(latestBackup, sourceFile);
}

/**
 * Returns true if the two files are equal.
 * @param {string} file1 File path
 * @param {string} file2 File path
 * @returns {boolean} True if both files exist and have identical contents
 */
function areFilesEqual(file1, file2) {
    if (!fs.existsSync(file1) || !fs.existsSync(file2)) {
        return false;
    }

    const content1 = fs.readFileSync(file1);
    const content2 = fs.readFileSync(file2);
    return content1.toString() === content2.toString();
}

/**
 * Gets the latest backup file for a user.
 * @param {string} handle User handle
 * @returns {string|null} Latest backup file. Null if no backup exists.
 */
function getLatestBackup(handle) {
    const userDirectories = getUserDirectories(handle);
    const backupFiles = fs.readdirSync(userDirectories.backups)
        .filter(x => x.startsWith(getSettingsBackupFilePrefix(handle)))
        .map(x => ({ name: x, ctime: fs.statSync(path.join(userDirectories.backups, x)).ctimeMs }));
    const latestBackup = backupFiles.sort((a, b) => b.ctime - a.ctime)[0]?.name;
    if (!latestBackup) {
        return null;
    }
    return path.join(userDirectories.backups, latestBackup);
}

export const router = express.Router();

router.post('/save', function (request, response) {
    try {
        const pathToSettings = path.join(request.user.directories.root, SETTINGS_FILE);
        writeFileAtomicSync(pathToSettings, JSON.stringify(request.body, null, 4), 'utf8');
        triggerAutoSave(request.user.profile.handle);
        response.send({ result: 'ok' });
    } catch (err) {
        console.error(err);
        response.send(err);
    }
});

// Wintermute's code
router.post('/get', (request, response) => {
    let settings;
    try {
        const pathToSettings = path.join(request.user.directories.root, SETTINGS_FILE);
        settings = fs.readFileSync(pathToSettings, 'utf8');
    } catch (e) {
        console.error('Could not read the settings file', e);
        return response.sendStatus(500);
    }

    // NovelAI Settings
    const { fileContents: novelai_settings, fileNames: novelai_setting_names }
        = readPresetsFromDirectory(request.user.directories.novelAI_Settings, {
            sortFunction: sortByName(request.user.directories.novelAI_Settings),
            removeFileExtension: true,
        });

    // OpenAI Settings
    const { fileContents: openai_settings, fileNames: openai_setting_names }
        = readPresetsFromDirectory(request.user.directories.openAI_Settings, {
            sortFunction: sortByName(request.user.directories.openAI_Settings), removeFileExtension: true,
        });

    // TextGenerationWebUI Settings
    const { fileContents: textgenerationwebui_presets, fileNames: textgenerationwebui_preset_names }
        = readPresetsFromDirectory(request.user.directories.textGen_Settings, {
            sortFunction: sortByName(request.user.directories.textGen_Settings), removeFileExtension: true,
        });

    // KoboldAI Settings
    const { fileContents: koboldai_settings, fileNames: koboldai_setting_names }
        = readPresetsFromDirectory(request.user.directories.koboldAI_Settings, {
            sortFunction: sortByName(request.user.directories.koboldAI_Settings), removeFileExtension: true,
        });

    const worldFiles = fs
        .readdirSync(request.user.directories.worlds)
        .filter(file => path.extname(file).toLowerCase() === '.json')
        .sort((a, b) => a.localeCompare(b));
    const world_names = worldFiles.map(item => path.parse(item).name);

    const themes = readAndParseFromDirectory(request.user.directories.themes);
    const movingUIPresets = readAndParseFromDirectory(request.user.directories.movingUI);
    const quickReplyPresets = readAndParseFromDirectory(request.user.directories.quickreplies);

    const instruct = readAndParseFromDirectory(request.user.directories.instruct);
    const context = readAndParseFromDirectory(request.user.directories.context);
    const sysprompt = readAndParseFromDirectory(request.user.directories.sysprompt);
    const reasoning = readAndParseFromDirectory(request.user.directories.reasoning);

    response.send({
        settings,
        koboldai_settings,
        koboldai_setting_names,
        world_names,
        novelai_settings,
        novelai_setting_names,
        openai_settings,
        openai_setting_names,
        textgenerationwebui_presets,
        textgenerationwebui_preset_names,
        themes,
        movingUIPresets,
        quickReplyPresets,
        instruct,
        context,
        sysprompt,
        reasoning,
        enable_extensions: ENABLE_EXTENSIONS,
        enable_extensions_auto_update: ENABLE_EXTENSIONS_AUTO_UPDATE,
        enable_accounts: ENABLE_ACCOUNTS,
    });
});

router.post('/get-snapshots', async (request, response) => {
    try {
        const snapshots = fs.readdirSync(request.user.directories.backups);
        const userFilesPattern = getSettingsBackupFilePrefix(request.user.profile.handle);
        const userSnapshots = snapshots.filter(x => x.startsWith(userFilesPattern));

        const result = userSnapshots.map(x => {
            const stat = fs.statSync(path.join(request.user.directories.backups, x));
            return { date: stat.ctimeMs, name: x, size: stat.size };
        });

        response.json(result);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/load-snapshot', getFileNameValidationFunction('name'), async (request, response) => {
    try {
        const userFilesPattern = getSettingsBackupFilePrefix(request.user.profile.handle);

        if (!request.body.name || !request.body.name.startsWith(userFilesPattern)) {
            return response.status(400).send({ error: 'Invalid snapshot name' });
        }

        const snapshotName = request.body.name;
        const snapshotPath = path.join(request.user.directories.backups, snapshotName);

        if (!fs.existsSync(snapshotPath)) {
            return response.sendStatus(404);
        }

        const content = fs.readFileSync(snapshotPath, 'utf8');

        response.send(content);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/make-snapshot', async (request, response) => {
    try {
        backupUserSettings(request.user.profile.handle, false);
        response.sendStatus(204);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

router.post('/restore-snapshot', getFileNameValidationFunction('name'), async (request, response) => {
    try {
        const userFilesPattern = getSettingsBackupFilePrefix(request.user.profile.handle);

        if (!request.body.name || !request.body.name.startsWith(userFilesPattern)) {
            return response.status(400).send({ error: 'Invalid snapshot name' });
        }

        const snapshotName = request.body.name;
        const snapshotPath = path.join(request.user.directories.backups, snapshotName);

        if (!fs.existsSync(snapshotPath)) {
            return response.sendStatus(404);
        }

        const pathToSettings = path.join(request.user.directories.root, SETTINGS_FILE);
        fs.rmSync(pathToSettings, { force: true });
        fs.copyFileSync(snapshotPath, pathToSettings);

        response.sendStatus(204);
    } catch (error) {
        console.error(error);
        response.sendStatus(500);
    }
});

/**
 * Initializes the settings endpoint.
 */
export async function init() {
    await backupSettings();
}
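
// Usage sketch (illustrative, not part of this commit): the main server is
// assumed to mount this router and call init() during startup, e.g.
//   app.use('/api/settings', router);
//   await init(); // makes a startup backup of every user's settings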
401
web-app/src/endpoints/speech.js
Normal file
@@ -0,0 +1,401 @@
import { Buffer } from 'node:buffer';
import fs from 'node:fs';
import express from 'express';
import wavefile from 'wavefile';
import fetch from 'node-fetch';
import FormData from 'form-data';
import mime from 'mime-types';
import { getPipeline } from '../transformers.js';
import { forwardFetchResponse } from '../util.js';
import { readSecret, SECRET_KEYS } from './secrets.js';

export const router = express.Router();

/**
 * Gets the audio data from a base64-encoded audio file.
 * @param {string} audio Base64-encoded audio
 * @returns {Float64Array} Audio data
 */
function getWaveFile(audio) {
    const wav = new wavefile.WaveFile();
    wav.fromDataURI(audio);
    wav.toBitDepth('32f');
    wav.toSampleRate(16000);
    let audioData = wav.getSamples();
    if (Array.isArray(audioData)) {
        if (audioData.length > 1) {
            const SCALING_FACTOR = Math.sqrt(2);

            // Merge channels (into first channel to save memory)
            for (let i = 0; i < audioData[0].length; ++i) {
                audioData[0][i] = SCALING_FACTOR * (audioData[0][i] + audioData[1][i]) / 2;
            }
        }

        // Select first channel
        audioData = audioData[0];
    }

    return audioData;
}

router.post('/recognize', async (req, res) => {
    try {
        const TASK = 'automatic-speech-recognition';
        const { model, audio, lang } = req.body;
        const pipe = await getPipeline(TASK, model);
        const wav = getWaveFile(audio);
        const start = performance.now();
        const result = await pipe(wav, { language: lang || null, task: 'transcribe' });
        const end = performance.now();
        console.info(`Execution duration: ${(end - start) / 1000} seconds`);
        console.info('Transcribed audio:', result.text);

        return res.json({ text: result.text });
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.post('/synthesize', async (req, res) => {
    try {
        const TASK = 'text-to-speech';
        const { text, model, speaker } = req.body;
        const pipe = await getPipeline(TASK, model);
        const speaker_embeddings = speaker
            ? new Float32Array(new Uint8Array(Buffer.from(speaker.startsWith('data:') ? speaker.split(',')[1] : speaker, 'base64')).buffer)
            : null;
        const start = performance.now();
        const result = await pipe(text, { speaker_embeddings: speaker_embeddings });
        const end = performance.now();
        console.debug(`Execution duration: ${(end - start) / 1000} seconds`);

        const wav = new wavefile.WaveFile();
        wav.fromScratch(1, result.sampling_rate, '32f', result.audio);
        const buffer = wav.toBuffer();

        res.set('Content-Type', 'audio/wav');
        return res.send(Buffer.from(buffer));
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

const pollinations = express.Router();

pollinations.post('/voices', async (req, res) => {
    try {
        const model = req.body.model || 'openai-audio';

        const response = await fetch('https://text.pollinations.ai/models');

        if (!response.ok) {
            throw new Error('Failed to fetch Pollinations models');
        }

        const data = await response.json();

        if (!Array.isArray(data)) {
            throw new Error('Invalid data format received from Pollinations');
        }

        const audioModelData = data.find(m => m.name === model);
        if (!audioModelData || !Array.isArray(audioModelData.voices)) {
            throw new Error('No voices found for the specified model');
        }

        const voices = audioModelData.voices;
        return res.json(voices);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

pollinations.post('/generate', async (req, res) => {
    try {
        const text = req.body.text;
        const model = req.body.model || 'openai-audio';
        const voice = req.body.voice || 'alloy';

        const url = new URL(`https://text.pollinations.ai/generate/${encodeURIComponent(text)}`);
        url.searchParams.append('model', model);
        url.searchParams.append('voice', voice);
        url.searchParams.append('referrer', 'sillytavern');
        console.info('Pollinations request URL:', url.toString());

        const response = await fetch(url);

        if (!response.ok) {
            const errorText = await response.text();
            throw new Error(`Failed to generate audio from Pollinations: ${errorText}`);
        }

        res.set('Content-Type', 'audio/mpeg');
        forwardFetchResponse(response, res);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.use('/pollinations', pollinations);

const elevenlabs = express.Router();

elevenlabs.post('/voices', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const response = await fetch('https://api.elevenlabs.io/v1/voices', {
            headers: {
                'xi-api-key': apiKey,
            },
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs voices fetch failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        const responseJson = await response.json();
        return res.json(responseJson);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/voice-settings', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const response = await fetch('https://api.elevenlabs.io/v1/voices/settings/default', {
            headers: {
                'xi-api-key': apiKey,
            },
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs voice settings fetch failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }
        const responseJson = await response.json();
        return res.json(responseJson);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/synthesize', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const { voiceId, request } = req.body;

        if (!voiceId || !request) {
            console.warn('ElevenLabs synthesis request missing voiceId or request body');
            return res.sendStatus(400);
        }

        console.debug('ElevenLabs TTS request:', request);

        const response = await fetch(`https://api.elevenlabs.io/v1/text-to-speech/${voiceId}`, {
            method: 'POST',
            headers: {
                'xi-api-key': apiKey,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(request),
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs synthesis failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        res.set('Content-Type', 'audio/mpeg');
        forwardFetchResponse(response, res);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/history', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const response = await fetch('https://api.elevenlabs.io/v1/history', {
            headers: {
                'xi-api-key': apiKey,
            },
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs history fetch failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        const responseJson = await response.json();
        return res.json(responseJson);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/history-audio', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const { historyItemId } = req.body;
        if (!historyItemId) {
            console.warn('ElevenLabs history audio request missing historyItemId');
            return res.sendStatus(400);
        }

        console.debug('ElevenLabs history audio request for ID:', historyItemId);

        const response = await fetch(`https://api.elevenlabs.io/v1/history/${historyItemId}/audio`, {
            headers: {
                'xi-api-key': apiKey,
            },
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs history audio fetch failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        res.set('Content-Type', 'audio/mpeg');
        forwardFetchResponse(response, res);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/voices/add', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        const { name, description, labels, files } = req.body;

        const formData = new FormData();
        formData.append('name', name || 'Custom Voice');
        formData.append('description', description || 'Uploaded via SillyTavern');
        formData.append('labels', labels || '');

        for (const fileData of (files || [])) {
            // Each entry is expected to be a data URI: 'data:<mime>;base64,<payload>'
            const [mimeType, base64Data] = /^data:(.+);base64,(.+)$/.exec(fileData)?.slice(1) || [];
            if (!mimeType || !base64Data) {
                console.warn('Invalid audio file data provided for ElevenLabs voice upload');
                continue;
            }
            const buffer = Buffer.from(base64Data, 'base64');
            formData.append('files', buffer, {
                filename: `audio.${mime.extension(mimeType) || 'wav'}`,
                contentType: mimeType,
            });
        }

        console.debug('ElevenLabs voice upload request:', { name, description, labels, files: files?.length || 0 });

        const response = await fetch('https://api.elevenlabs.io/v1/voices/add', {
            method: 'POST',
            headers: {
                'xi-api-key': apiKey,
            },
            body: formData,
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs voice upload failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        const responseJson = await response.json();
        return res.json(responseJson);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

elevenlabs.post('/recognize', async (req, res) => {
    try {
        const apiKey = readSecret(req.user.directories, SECRET_KEYS.ELEVENLABS);
        if (!apiKey) {
            console.warn('ElevenLabs API key not found');
            return res.sendStatus(400);
        }

        // req.file is expected to be populated by the multipart upload middleware configured upstream
        if (!req.file) {
            console.warn('No audio file found');
            return res.sendStatus(400);
        }

        console.info('Processing audio file with ElevenLabs', req.file.path);
        const formData = new FormData();
        formData.append('file', fs.createReadStream(req.file.path), { filename: 'audio.wav', contentType: 'audio/wav' });
        formData.append('model_id', req.body.model);

        const response = await fetch('https://api.elevenlabs.io/v1/speech-to-text', {
            method: 'POST',
            headers: {
                'xi-api-key': apiKey,
            },
            body: formData,
        });

        if (!response.ok) {
            const text = await response.text();
            console.warn(`ElevenLabs speech recognition failed: HTTP ${response.status} - ${text}`);
            return res.sendStatus(500);
        }

        fs.unlinkSync(req.file.path);
        const responseJson = await response.json();
        console.debug('ElevenLabs speech recognition response:', responseJson);
        return res.json(responseJson);
    } catch (error) {
        console.error(error);
        return res.sendStatus(500);
    }
});

router.use('/elevenlabs', elevenlabs);
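
// Usage sketch (illustrative): assuming this router is mounted at /api/speech,
// a client transcribes base64 WAV audio with the local pipeline like so:
//   await fetch('/api/speech/recognize', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ model: modelName, audio: wavDataUri, lang: 'en' }),
//   });
// modelName and wavDataUri are placeholders supplied by the caller.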
290
web-app/src/endpoints/sprites.js
Normal file
@@ -0,0 +1,290 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import mime from 'mime-types';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

import { getImageBuffers } from '../util.js';

/**
 * Gets the path to the sprites folder for the provided character name
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} name - The name of the character
 * @param {boolean} isSubfolder - Whether the name contains a subfolder
 * @returns {string | null} The path to the sprites folder. Null if the name is invalid.
 */
function getSpritesPath(directories, name, isSubfolder) {
    if (isSubfolder) {
        const nameParts = name.split('/');
        const characterName = sanitize(nameParts[0]);
        const subfolderName = sanitize(nameParts[1]);

        if (!characterName || !subfolderName) {
            return null;
        }

        return path.join(directories.characters, characterName, subfolderName);
    }

    name = sanitize(name);

    if (!name) {
        return null;
    }

    return path.join(directories.characters, name);
}

/**
 * Imports base64 encoded sprites from RisuAI character data.
 * The sprites are saved in the character's sprites folder.
 * The additionalAssets and emotions are removed from the data.
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {object} data RisuAI character data
 * @returns {void}
 */
export function importRisuSprites(directories, data) {
    try {
        const name = data?.data?.name;
        const risuData = data?.data?.extensions?.risuai;

        // Not a Risu AI character
        if (!risuData || !name) {
            return;
        }

        let images = [];

        if (Array.isArray(risuData.additionalAssets)) {
            images = images.concat(risuData.additionalAssets);
        }

        if (Array.isArray(risuData.emotions)) {
            images = images.concat(risuData.emotions);
        }

        // No sprites to import
        if (images.length === 0) {
            return;
        }

        // Resolve the sprites folder for this character
        const spritesPath = getSpritesPath(directories, name, false);

        // Invalid sprites path
        if (!spritesPath) {
            return;
        }

        // Create sprites folder if it doesn't exist
        if (!fs.existsSync(spritesPath)) {
            fs.mkdirSync(spritesPath, { recursive: true });
        }

        // Path to sprites is not a directory. This should never happen.
        if (!fs.statSync(spritesPath).isDirectory()) {
            return;
        }

        console.info(`RisuAI: Found ${images.length} sprites for ${name}. Writing to disk.`);
        const files = fs.readdirSync(spritesPath);

        outer: for (const [label, fileBase64] of images) {
            // Skip if a sprite with the same label already exists
            for (const file of files) {
                if (path.parse(file).name === label) {
                    console.warn(`RisuAI: The sprite ${label} for ${name} already exists. Skipping.`);
                    continue outer;
                }
            }

            const filename = label + '.png';
            const pathToFile = path.join(spritesPath, sanitize(filename));
            writeFileAtomicSync(pathToFile, fileBase64, { encoding: 'base64' });
        }

        // Remove additionalAssets and emotions from data (they are now in the sprites folder)
        delete data.data.extensions.risuai.additionalAssets;
        delete data.data.extensions.risuai.emotions;
    } catch (error) {
        console.error(error);
    }
}

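// Data shape assumed by the import loop above: each entry is a [label, base64]
// pair, e.g. risuData.emotions = [['joy', 'iVBORw0KGgo...'], ['anger', '...']]
// (values truncated for illustration).
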
export const router = express.Router();

router.get('/get', function (request, response) {
    const name = String(request.query.name);
    const isSubfolder = name.includes('/');
    const spritesPath = getSpritesPath(request.user.directories, name, isSubfolder);
    let sprites = [];

    try {
        if (spritesPath && fs.existsSync(spritesPath) && fs.statSync(spritesPath).isDirectory()) {
            sprites = fs.readdirSync(spritesPath)
                .filter(file => {
                    const mimeType = mime.lookup(file);
                    return mimeType && mimeType.startsWith('image/');
                })
                .map((file) => {
                    const pathToSprite = path.join(spritesPath, file);
                    const mtime = fs.statSync(pathToSprite).mtime?.toISOString().replace(/[^0-9]/g, '').slice(0, 14);

                    const fileName = path.parse(pathToSprite).name.toLowerCase();
                    // Extract the label from the filename via regex, which can be suffixed with a sub-name, either connected with a dash or a dot.
                    // Examples: joy.png, joy-1.png, joy.expressive.png
                    const label = fileName.match(/^(.+?)(?:[-.].*?)?$/)?.[1] ?? fileName;

                    return {
                        label: label,
                        path: `/characters/${name}/${file}` + (mtime ? `?t=${mtime}` : ''),
                    };
                });
        }
    }
    catch (err) {
        console.error(err);
    }
    return response.send(sprites);
});

router.post('/delete', async (request, response) => {
    const label = request.body.label;
    const name = String(request.body.name);
    const isSubfolder = name.includes('/');
    const spriteName = request.body.spriteName || label;

    if (!spriteName || !name) {
        return response.sendStatus(400);
    }

    try {
        const spritesPath = getSpritesPath(request.user.directories, name, isSubfolder);

        // No sprites folder exists, or not a directory
        if (!spritesPath || !fs.existsSync(spritesPath) || !fs.statSync(spritesPath).isDirectory()) {
            return response.sendStatus(404);
        }

        const files = fs.readdirSync(spritesPath);

        // Remove existing sprite with the same label
        for (const file of files) {
            if (path.parse(file).name === spriteName) {
                fs.unlinkSync(path.join(spritesPath, file));
            }
        }

        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/upload-zip', async (request, response) => {
    const file = request.file;
    const name = String(request.body.name);
    const isSubfolder = name.includes('/');

    if (!file || !name) {
        return response.sendStatus(400);
    }

    try {
        const spritesPath = getSpritesPath(request.user.directories, name, isSubfolder);

        // Invalid sprites path
        if (!spritesPath) {
            return response.sendStatus(400);
        }

        // Create sprites folder if it doesn't exist
        if (!fs.existsSync(spritesPath)) {
            fs.mkdirSync(spritesPath, { recursive: true });
        }

        // Path to sprites is not a directory. This should never happen.
        if (!fs.statSync(spritesPath).isDirectory()) {
            return response.sendStatus(404);
        }

        const spritePackPath = path.join(file.destination, file.filename);
        const sprites = await getImageBuffers(spritePackPath);
        const files = fs.readdirSync(spritesPath);

        for (const [filename, buffer] of sprites) {
            // Remove existing sprite with the same label
            const existingFile = files.find(file => path.parse(file).name === path.parse(filename).name);

            if (existingFile) {
                fs.unlinkSync(path.join(spritesPath, existingFile));
            }

            // Write sprite buffer to disk
            const pathToSprite = path.join(spritesPath, sanitize(filename));
            writeFileAtomicSync(pathToSprite, buffer);
        }

        // Remove uploaded ZIP file
        fs.unlinkSync(spritePackPath);
        return response.send({ ok: true, count: sprites.length });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/upload', async (request, response) => {
    const file = request.file;
    const label = request.body.label;
    const name = String(request.body.name);
    const isSubfolder = name.includes('/');
    const spriteName = request.body.spriteName || label;

    if (!file || !label || !name) {
        return response.sendStatus(400);
    }

    try {
        const spritesPath = getSpritesPath(request.user.directories, name, isSubfolder);

        // Invalid sprites path
        if (!spritesPath) {
            return response.sendStatus(400);
        }

        // Create sprites folder if it doesn't exist
        if (!fs.existsSync(spritesPath)) {
            fs.mkdirSync(spritesPath, { recursive: true });
        }

        // Path to sprites is not a directory. This should never happen.
        if (!fs.statSync(spritesPath).isDirectory()) {
            return response.sendStatus(404);
        }

        const files = fs.readdirSync(spritesPath);

        // Remove existing sprite with the same label
        for (const file of files) {
            if (path.parse(file).name === spriteName) {
                fs.unlinkSync(path.join(spritesPath, file));
            }
        }

        const filename = spriteName + path.parse(file.originalname).ext;
        const spritePath = path.join(file.destination, file.filename);
        const pathToFile = path.join(spritesPath, sanitize(filename));
        // Copy uploaded file to sprites folder
        fs.cpSync(spritePath, pathToFile);
        // Remove uploaded file
        fs.unlinkSync(spritePath);
        return response.send({ ok: true });
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
1822
web-app/src/endpoints/stable-diffusion.js
Normal file
File diff suppressed because it is too large
469
web-app/src/endpoints/stats.js
Normal file
@@ -0,0 +1,469 @@
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';

import express from 'express';
import writeFileAtomic from 'write-file-atomic';

import { getAllUserHandles, getUserDirectories } from '../users.js';

const readFile = fs.promises.readFile;
const readdir = fs.promises.readdir;

const STATS_FILE = 'stats.json';

const monthNames = [
    'January',
    'February',
    'March',
    'April',
    'May',
    'June',
    'July',
    'August',
    'September',
    'October',
    'November',
    'December',
];

/**
 * @type {Map<string, Object>} The stats object for each user.
 */
const STATS = new Map();
/**
 * @type {Map<string, number>} The timestamps for each user.
 */
const TIMESTAMPS = new Map();

/**
 * Convert a timestamp to an integer timestamp.
 * This function can handle several different timestamp formats:
 * 1. Date.now timestamps (the number of milliseconds since the Unix Epoch)
 * 2. ST "humanized" timestamps, formatted like `YYYY-MM-DD@HHhMMmSSsMSms`
 * 3. Date strings in the format `Month DD, YYYY H:MMam/pm`
 * 4. ISO 8601 formatted strings
 * 5. Date objects
 *
 * The function returns the timestamp as the number of milliseconds since
 * the Unix Epoch, which can be converted to a JavaScript Date object with new Date().
 *
 * @param {string|number|Date} timestamp - The timestamp to convert.
 * @returns {number} The timestamp in milliseconds since the Unix Epoch, or 0 if the input cannot be parsed.
 *
 * @example
 * // Unix timestamp
 * parseTimestamp(1609459200);
 * // ST humanized timestamp
 * parseTimestamp("2021-01-01 \@00h 00m 00s 000ms");
 * // Date string
 * parseTimestamp("January 1, 2021 12:00am");
 */
function parseTimestamp(timestamp) {
    if (!timestamp) {
        return 0;
    }

    // Date object
    if (timestamp instanceof Date) {
        return timestamp.getTime();
    }

    // Unix time
    if (typeof timestamp === 'number' || /^\d+$/.test(timestamp)) {
        const unixTime = Number(timestamp);
        const isValid = Number.isFinite(unixTime) && !Number.isNaN(unixTime) && unixTime >= 0;
        if (!isValid) return 0;
        return new Date(unixTime).getTime();
    }

    // ISO 8601 format
    const isoPattern = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z$/;
    if (isoPattern.test(timestamp)) {
        return new Date(timestamp).getTime();
    }

    const dateFormats = [];

    // meridiem-based format
    const convertFromMeridiemBased = (_, month, day, year, hour, minute, meridiem) => {
        const monthNum = monthNames.indexOf(month) + 1;
        const hour24 = meridiem.toLowerCase() === 'pm' ? (parseInt(hour, 10) % 12) + 12 : parseInt(hour, 10) % 12;
        return `${year}-${monthNum}-${day.padStart(2, '0')}T${hour24.toString().padStart(2, '0')}:${minute.padStart(2, '0')}:00`;
    };
    // June 19, 2023 2:20pm
    dateFormats.push({ callback: convertFromMeridiemBased, pattern: /(\w+)\s(\d{1,2}),\s(\d{4})\s(\d{1,2}):(\d{1,2})(am|pm)/i });

    // ST "humanized" format patterns
    const convertFromHumanized = (_, year, month, day, hour, min, sec, ms) => {
        ms = typeof ms !== 'undefined' ? `.${ms.padStart(3, '0')}` : '';
        return `${year.padStart(4, '0')}-${month.padStart(2, '0')}-${day.padStart(2, '0')}T${hour.padStart(2, '0')}:${min.padStart(2, '0')}:${sec.padStart(2, '0')}${ms}Z`;
    };
    // 2024-07-12@01h31m37s123ms
    dateFormats.push({ callback: convertFromHumanized, pattern: /(\d{4})-(\d{1,2})-(\d{1,2})@(\d{1,2})h(\d{1,2})m(\d{1,2})s(\d{1,3})ms/ });
    // 2024-7-12@01h31m37s
    dateFormats.push({ callback: convertFromHumanized, pattern: /(\d{4})-(\d{1,2})-(\d{1,2})@(\d{1,2})h(\d{1,2})m(\d{1,2})s/ });
    // 2024-6-5 @14h 56m 50s 682ms
    dateFormats.push({ callback: convertFromHumanized, pattern: /(\d{4})-(\d{1,2})-(\d{1,2}) @(\d{1,2})h (\d{1,2})m (\d{1,2})s (\d{1,3})ms/ });

    for (const x of dateFormats) {
        const rgxMatch = timestamp.match(x.pattern);
        if (!rgxMatch) continue;
        const isoTimestamp = x.callback(...rgxMatch);
        return new Date(isoTimestamp).getTime();
    }

    return 0;
}

/**
 * Collects and aggregates stats for all characters.
 *
 * @param {string} chatsPath - The path to the directory containing the chat files.
 * @param {string} charactersPath - The path to the directory containing the character files.
 * @returns {Promise<Object>} The aggregated stats object.
 */
async function collectAndCreateStats(chatsPath, charactersPath) {
    const files = await readdir(charactersPath);

    const pngFiles = files.filter((file) => file.endsWith('.png'));

    const processingPromises = pngFiles.map((file) =>
        calculateStats(chatsPath, file),
    );
    const statsArr = await Promise.all(processingPromises);

    let finalStats = {};
    for (const stat of statsArr) {
        finalStats = { ...finalStats, ...stat };
    }
    // Tag with a timestamp of when the stats were generated
    finalStats.timestamp = Date.now();
    return finalStats;
}

/**
 * Recreates the stats object for a user.
 * @param {string} handle User handle
 * @param {string} chatsPath Path to the directory containing the chat files.
 * @param {string} charactersPath Path to the directory containing the character files.
 */
export async function recreateStats(handle, chatsPath, charactersPath) {
    console.info('Collecting and creating stats for user:', handle);
    const stats = await collectAndCreateStats(chatsPath, charactersPath);
    STATS.set(handle, stats);
    await saveStatsToFile();
}

/**
 * Loads the stats file into memory. If the file doesn't exist or is invalid,
 * initializes stats by collecting and creating them for each character.
 */
export async function init() {
    try {
        const userHandles = await getAllUserHandles();
        for (const handle of userHandles) {
            const directories = getUserDirectories(handle);
            try {
                const statsFilePath = path.join(directories.root, STATS_FILE);
                const statsFileContent = await readFile(statsFilePath, 'utf-8');
                STATS.set(handle, JSON.parse(statsFileContent));
            } catch (err) {
                // If the file doesn't exist or is invalid, initialize stats
                if (err.code === 'ENOENT' || err instanceof SyntaxError) {
                    await recreateStats(handle, directories.chats, directories.characters);
                } else {
                    throw err; // Rethrow the error if it's something we didn't expect
                }
            }
        }
    } catch (err) {
        console.error('Failed to initialize stats:', err);
    }
    // Save stats every 5 minutes
    setInterval(saveStatsToFile, 5 * 60 * 1000);
}
/**
 * Saves the current state of charStats to a file, only if the data has changed since the last save.
 */
async function saveStatsToFile() {
    const userHandles = await getAllUserHandles();
    for (const handle of userHandles) {
        if (!STATS.has(handle)) {
            continue;
        }
        const charStats = STATS.get(handle);
        const lastSaveTimestamp = TIMESTAMPS.get(handle) || 0;
        if (charStats.timestamp > lastSaveTimestamp) {
            try {
                const directories = getUserDirectories(handle);
                const statsFilePath = path.join(directories.root, STATS_FILE);
                await writeFileAtomic(statsFilePath, JSON.stringify(charStats));
                TIMESTAMPS.set(handle, Date.now());
            } catch (error) {
                console.error('Failed to save stats to file.', error);
            }
        }
    }
}

/**
 * Attempts to save charStats to a file before the process terminates.
 * If an error occurs during the file write, it logs the error before exiting.
 */
export async function onExit() {
    try {
        await saveStatsToFile();
    } catch (err) {
        console.error('Failed to write stats to file:', err);
    }
}

/**
 * Reads the contents of a file and returns the lines in the file as an array.
 *
 * @param {string} filepath - The path of the file to be read.
 * @returns {Array<string>} - The lines in the file, or an empty array if the file cannot be read.
 */
function readAndParseFile(filepath) {
    try {
        const file = fs.readFileSync(filepath, 'utf8');
        const lines = file.split('\n');
        return lines;
    } catch (error) {
        console.error(`Error reading file at ${filepath}: ${error}`);
        return [];
    }
}

/**
 * Calculates the time difference between two dates.
 *
 * @param {string} gen_started - The start time in ISO 8601 format.
 * @param {string} gen_finished - The finish time in ISO 8601 format.
 * @returns {number} - The difference in time in milliseconds.
 */
function calculateGenTime(gen_started, gen_finished) {
    const startDate = new Date(gen_started);
    const endDate = new Date(gen_finished);
    return Number(endDate) - Number(startDate);
}

/**
 * Counts the number of words in a string.
 *
 * @param {string} str - The string to count words in.
 * @returns {number} - The number of words in the string.
 */
function countWordsInString(str) {
    const match = str.match(/\b\w+\b/g);
    return match ? match.length : 0;
}

/**
 * calculateStats - Calculate statistics for a given character chat directory.
 *
 * @param {string} chatsPath The directory containing the chat files.
 * @param {string} item The name of the character.
 * @return {object} An object containing the calculated statistics.
 */
const calculateStats = (chatsPath, item) => {
    const chatDir = path.join(chatsPath, item.replace('.png', ''));
    const stats = {
        total_gen_time: 0,
        user_word_count: 0,
        non_user_word_count: 0,
        user_msg_count: 0,
        non_user_msg_count: 0,
        total_swipe_count: 0,
        chat_size: 0,
        date_last_chat: 0,
        date_first_chat: new Date('9999-12-31T23:59:59.999Z').getTime(),
    };
    const uniqueGenStartTimes = new Set();

    if (fs.existsSync(chatDir)) {
        const chats = fs.readdirSync(chatDir);
        if (Array.isArray(chats) && chats.length) {
            for (const chat of chats) {
                const result = calculateTotalGenTimeAndWordCount(
                    chatDir,
                    chat,
                    uniqueGenStartTimes,
                );
                stats.total_gen_time += result.totalGenTime || 0;
                stats.user_word_count += result.userWordCount || 0;
                stats.non_user_word_count += result.nonUserWordCount || 0;
                stats.user_msg_count += result.userMsgCount || 0;
                stats.non_user_msg_count += result.nonUserMsgCount || 0;
                stats.total_swipe_count += result.totalSwipeCount || 0;

                const chatStat = fs.statSync(path.join(chatDir, chat));
                stats.chat_size += chatStat.size;
                stats.date_last_chat = Math.max(
                    stats.date_last_chat,
                    Math.floor(chatStat.mtimeMs),
                );
                stats.date_first_chat = Math.min(
                    stats.date_first_chat,
                    result.firstChatTime,
                );
            }
        }
    }

    return { [item]: stats };
};

/**
 * Sets the current charStats object.
 * @param {string} handle - The user handle.
 * @param {Object} stats - The new charStats object.
 */
function setCharStats(handle, stats) {
    stats.timestamp = Date.now();
    STATS.set(handle, stats);
}

/**
 * Calculates the total generation time and word count for a chat with a character.
 *
 * @param {string} chatDir - The directory path where character chat files are stored.
 * @param {string} chat - The name of the chat file.
 * @param {Set<string>} uniqueGenStartTimes - Hashes of messages that were already counted.
 * @returns {Object} - An object containing the total generation time, user word count, and non-user word count.
 */
function calculateTotalGenTimeAndWordCount(
    chatDir,
    chat,
    uniqueGenStartTimes,
) {
    const filepath = path.join(chatDir, chat);
    const lines = readAndParseFile(filepath);

    let totalGenTime = 0;
    let userWordCount = 0;
    let nonUserWordCount = 0;
    let nonUserMsgCount = 0;
    let userMsgCount = 0;
    let totalSwipeCount = 0;
    let firstChatTime = new Date('9999-12-31T23:59:59.999Z').getTime();

    for (const line of lines) {
        if (line.length) {
            try {
                const json = JSON.parse(line);
                if (json.mes) {
                    const hash = crypto
                        .createHash('sha256')
                        .update(json.mes)
                        .digest('hex');
                    if (uniqueGenStartTimes.has(hash)) {
                        continue;
                    }
                    if (hash) {
                        uniqueGenStartTimes.add(hash);
                    }
                }

                if (json.gen_started && json.gen_finished) {
                    const genTime = calculateGenTime(
                        json.gen_started,
                        json.gen_finished,
                    );
                    totalGenTime += genTime;

                    if (json.swipes && !json.swipe_info) {
                        // If there are swipes but no swipe_info, estimate the genTime
                        totalGenTime += genTime * json.swipes.length;
                    }
                }

                if (json.mes) {
                    const wordCount = countWordsInString(json.mes);
                    json.is_user
                        ? (userWordCount += wordCount)
                        : (nonUserWordCount += wordCount);
                    json.is_user ? userMsgCount++ : nonUserMsgCount++;
                }

                if (json.swipes && json.swipes.length > 1) {
                    totalSwipeCount += json.swipes.length - 1; // Subtract 1 to not count the first swipe
                    for (let i = 1; i < json.swipes.length; i++) {
                        // Start from the second swipe
                        const swipeText = json.swipes[i];

                        const wordCount = countWordsInString(swipeText);
                        json.is_user
                            ? (userWordCount += wordCount)
                            : (nonUserWordCount += wordCount);
                        json.is_user ? userMsgCount++ : nonUserMsgCount++;
                    }
                }

                if (json.swipe_info && json.swipe_info.length > 1) {
                    for (let i = 1; i < json.swipe_info.length; i++) {
                        // Start from the second swipe
                        const swipe = json.swipe_info[i];
                        if (swipe.gen_started && swipe.gen_finished) {
                            totalGenTime += calculateGenTime(
                                swipe.gen_started,
                                swipe.gen_finished,
                            );
                        }
                    }
                }

                // If this is a user message, keep the earliest send date as the first chat time
                if (json.is_user) {
                    firstChatTime = Math.min(parseTimestamp(json.send_date), firstChatTime);
                }
            } catch (error) {
                console.error(`Error parsing line ${line}: ${error}`);
            }
        }
    }
    return {
        totalGenTime,
        userWordCount,
        nonUserWordCount,
        userMsgCount,
        nonUserMsgCount,
        totalSwipeCount,
        firstChatTime,
    };
}

export const router = express.Router();

/**
 * Handle a POST request to get the stats object
 */
router.post('/get', function (request, response) {
    const stats = STATS.get(request.user.profile.handle) || {};
    response.send(stats);
});

/**
 * Triggers the recreation of statistics from chat files.
 */
router.post('/recreate', async function (request, response) {
    try {
        await recreateStats(request.user.profile.handle, request.user.directories.chats, request.user.directories.characters);
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

/**
 * Handle a POST request to update the stats object
 */
router.post('/update', function (request, response) {
    if (!request.body) return response.sendStatus(400);
    setCharStats(request.user.profile.handle, request.body);
    return response.sendStatus(200);
});
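
// Usage sketch (illustrative): assuming the router is mounted at /api/stats,
// a client can rebuild and then fetch its stats snapshot like so:
//   await fetch('/api/stats/recreate', { method: 'POST' });
//   const stats = await (await fetch('/api/stats/get', { method: 'POST' })).json();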
38
web-app/src/endpoints/themes.js
Normal file
@@ -0,0 +1,38 @@
import path from 'node:path';
import fs from 'node:fs';

import express from 'express';
import sanitize from 'sanitize-filename';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

export const router = express.Router();

router.post('/save', (request, response) => {
    if (!request.body || !request.body.name) {
        return response.sendStatus(400);
    }

    const filename = path.join(request.user.directories.themes, sanitize(`${request.body.name}.json`));
    writeFileAtomicSync(filename, JSON.stringify(request.body, null, 4), 'utf8');

    return response.sendStatus(200);
});

router.post('/delete', (request, response) => {
    if (!request.body || !request.body.name) {
        return response.sendStatus(400);
    }

    try {
        const filename = path.join(request.user.directories.themes, sanitize(`${request.body.name}.json`));
        if (!fs.existsSync(filename)) {
            console.error('Theme file not found:', filename);
            return response.sendStatus(404);
        }
        fs.unlinkSync(filename);
        return response.sendStatus(200);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});
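
// Request shape sketch (illustrative): saving a theme posts its full JSON,
// keyed by 'name', which also becomes the file name on disk:
//   await fetch('/api/themes/save', {
//       method: 'POST',
//       headers: { 'Content-Type': 'application/json' },
//       body: JSON.stringify({ name: 'My Theme', blur_strength: 10 }),
//   });
// 'blur_strength' is a placeholder property; any JSON payload is accepted.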
252
web-app/src/endpoints/thumbnails.js
Normal file
@@ -0,0 +1,252 @@
import fs from 'node:fs';
import { promises as fsPromises } from 'node:fs';
import path from 'node:path';

import mime from 'mime-types';
import express from 'express';
import sanitize from 'sanitize-filename';
import { Jimp, JimpMime } from '../jimp.js';
import { sync as writeFileAtomicSync } from 'write-file-atomic';

import { getConfigValue, invalidateFirefoxCache } from '../util.js';

const thumbnailsEnabled = !!getConfigValue('thumbnails.enabled', true, 'boolean');
const quality = Math.min(100, Math.max(1, parseInt(getConfigValue('thumbnails.quality', 95, 'number'))));
const pngFormat = String(getConfigValue('thumbnails.format', 'jpg')).toLowerCase().trim() === 'png';

/**
 * @typedef {'bg' | 'avatar' | 'persona'} ThumbnailType
 */

/** @type {Record<string, number[]>} */
export const dimensions = {
    'bg': getConfigValue('thumbnails.dimensions.bg', [160, 90]),
    'avatar': getConfigValue('thumbnails.dimensions.avatar', [96, 144]),
    'persona': getConfigValue('thumbnails.dimensions.persona', [96, 144]),
};

/**
 * Gets a path to thumbnail folder based on the type.
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {ThumbnailType} type Thumbnail type
 * @returns {string} Path to the thumbnails folder
 */
function getThumbnailFolder(directories, type) {
    let thumbnailFolder;

    switch (type) {
        case 'bg':
            thumbnailFolder = directories.thumbnailsBg;
            break;
        case 'avatar':
            thumbnailFolder = directories.thumbnailsAvatar;
            break;
        case 'persona':
            thumbnailFolder = directories.thumbnailsPersona;
            break;
    }

    return thumbnailFolder;
}

/**
 * Gets a path to the original images folder based on the type.
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {ThumbnailType} type Thumbnail type
 * @returns {string} Path to the original images folder
 */
function getOriginalFolder(directories, type) {
    let originalFolder;

    switch (type) {
        case 'bg':
            originalFolder = directories.backgrounds;
            break;
        case 'avatar':
            originalFolder = directories.characters;
            break;
        case 'persona':
            originalFolder = directories.avatars;
            break;
    }

    return originalFolder;
}

/**
 * Removes the generated thumbnail from the disk.
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {ThumbnailType} type Type of the thumbnail
 * @param {string} file Name of the file
 */
export function invalidateThumbnail(directories, type, file) {
    const folder = getThumbnailFolder(directories, type);
    if (folder === undefined) throw new Error('Invalid thumbnail type');

    const pathToThumbnail = path.join(folder, sanitize(file));

    if (fs.existsSync(pathToThumbnail)) {
        fs.unlinkSync(pathToThumbnail);
    }
}

/**
 * Generates a thumbnail for the given file.
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {ThumbnailType} type Type of the thumbnail
 * @param {string} file Name of the file
 * @returns {Promise<string|null>} Path to the cached thumbnail, or null if it could not be generated
 */
async function generateThumbnail(directories, type, file) {
    const thumbnailFolder = getThumbnailFolder(directories, type);
    const originalFolder = getOriginalFolder(directories, type);
    if (thumbnailFolder === undefined || originalFolder === undefined) throw new Error('Invalid thumbnail type');
    const pathToCachedFile = path.join(thumbnailFolder, file);
    const pathToOriginalFile = path.join(originalFolder, file);

    const cachedFileExists = fs.existsSync(pathToCachedFile);
    const originalFileExists = fs.existsSync(pathToOriginalFile);

    // To handle cases when the original image was updated after the thumbnail was created
    let shouldRegenerate = false;

    if (cachedFileExists && originalFileExists) {
        const originalStat = fs.statSync(pathToOriginalFile);
        const cachedStat = fs.statSync(pathToCachedFile);

        if (originalStat.mtimeMs > cachedStat.ctimeMs) {
            // Original file changed; regenerate the thumbnail
            shouldRegenerate = true;
        }
    }

    if (cachedFileExists && !shouldRegenerate) {
        return pathToCachedFile;
    }

    if (!originalFileExists) {
        return null;
    }

    try {
        let buffer;

        try {
            const size = dimensions[type];
            const image = await Jimp.read(pathToOriginalFile);
            const width = !isNaN(size?.[0]) && size?.[0] > 0 ? size[0] : image.bitmap.width;
            const height = !isNaN(size?.[1]) && size?.[1] > 0 ? size[1] : image.bitmap.height;
            image.cover({ w: width, h: height });
            buffer = pngFormat
                ? await image.getBuffer(JimpMime.png)
                : await image.getBuffer(JimpMime.jpeg, { quality: quality, jpegColorSpace: 'ycbcr' });
        }
        catch (inner) {
            console.warn(`Thumbnailer can not process the image: ${pathToOriginalFile}. Using original size`, inner);
            buffer = fs.readFileSync(pathToOriginalFile);
        }

        writeFileAtomicSync(pathToCachedFile, buffer);
    }
    catch (outer) {
        console.error(`Failed to write the thumbnail: ${pathToCachedFile}`, outer);
        return null;
    }

    return pathToCachedFile;
}

/**
|
||||
* Ensures that the thumbnail cache for backgrounds is valid.
|
||||
* @param {import('../users.js').UserDirectoryList[]} directoriesList User directories
|
||||
* @returns {Promise<void>} Promise that resolves when the cache is validated
|
||||
*/
|
||||
export async function ensureThumbnailCache(directoriesList) {
|
||||
for (const directories of directoriesList) {
|
||||
const cacheFiles = fs.readdirSync(directories.thumbnailsBg);
|
||||
|
||||
// files exist, all ok
|
||||
if (cacheFiles.length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
console.info('Generating thumbnails cache. Please wait...');
|
||||
|
||||
const bgFiles = fs.readdirSync(directories.backgrounds);
|
||||
const tasks = [];
|
||||
|
||||
for (const file of bgFiles) {
|
||||
tasks.push(generateThumbnail(directories, 'bg', file));
|
||||
}
|
||||
|
||||
await Promise.all(tasks);
|
||||
console.info(`Done! Generated: ${bgFiles.length} preview images`);
|
||||
}
|
||||
}

export const router = express.Router();

// Important: This route must be mounted as '/thumbnail'. It is used in the client code and saved to chat files.
router.get('/', async function (request, response) {
    try {
        if (typeof request.query.file !== 'string' || typeof request.query.type !== 'string') {
            return response.sendStatus(400);
        }

        const type = request.query.type;
        const file = request.query.file;

        if (!type || !file) {
            return response.sendStatus(400);
        }

        if (!(type === 'bg' || type === 'avatar' || type === 'persona')) {
            return response.sendStatus(400);
        }

        // Compare the raw query value against its sanitized form to reject path traversal attempts
        if (sanitize(file) !== file) {
            console.error('Malicious filename prevented');
            return response.sendStatus(403);
        }

        if (!thumbnailsEnabled) {
            const folder = getOriginalFolder(request.user.directories, type);

            if (folder === undefined) {
                return response.sendStatus(400);
            }

            const pathToOriginalFile = path.join(folder, file);
            if (!fs.existsSync(pathToOriginalFile)) {
                return response.sendStatus(404);
            }
            const contentType = mime.lookup(pathToOriginalFile) || 'image/png';
            const originalFile = await fsPromises.readFile(pathToOriginalFile);
            response.setHeader('Content-Type', contentType);

            invalidateFirefoxCache(pathToOriginalFile, request, response);

            return response.send(originalFile);
        }

        const pathToCachedFile = await generateThumbnail(request.user.directories, type, file);

        if (!pathToCachedFile) {
            return response.sendStatus(404);
        }

        if (!fs.existsSync(pathToCachedFile)) {
            return response.sendStatus(404);
        }

        const contentType = mime.lookup(pathToCachedFile) || 'image/jpeg';
        const cachedFile = await fsPromises.readFile(pathToCachedFile);
        response.setHeader('Content-Type', contentType);

        invalidateFirefoxCache(pathToCachedFile, request, response);

        return response.send(cachedFile);
    } catch (error) {
        console.error('Failed getting thumbnail', error);
        return response.sendStatus(500);
    }
});
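
A minimal sketch of how a client would query this route. The '/thumbnail' mount point is the one the comment above requires; the file name is a placeholder.

// Usage sketch (browser side, illustrative only — not part of this file).
async function loadThumbnail() {
    const params = new URLSearchParams({ type: 'bg', file: 'landscape.png' });
    const response = await fetch(`/thumbnail?${params.toString()}`);
    if (!response.ok) throw new Error(`Thumbnail request failed: ${response.status}`);
    return URL.createObjectURL(await response.blob()); // image/jpeg or image/png, depending on config
}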
1128
web-app/src/endpoints/tokenizers.js
Normal file
File diff suppressed because it is too large
409
web-app/src/endpoints/translate.js
Normal file
@@ -0,0 +1,409 @@
import fetch from 'node-fetch';
import express from 'express';
import { translate as bingTranslate } from 'bing-translate-api';
import urlJoin from 'url-join';
import { Translator } from 'google-translate-api-x';

import { readSecret, SECRET_KEYS } from './secrets.js';
import { getConfigValue, uuidv4 } from '../util.js';

const DEEPLX_URL_DEFAULT = 'http://127.0.0.1:1188/translate';
const ONERING_URL_DEFAULT = 'http://127.0.0.1:4990/translate';
const LINGVA_DEFAULT = 'https://lingva.ml/api/v1';

export const router = express.Router();

router.post('/libre', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.LIBRE);
        const url = readSecret(request.user.directories, SECRET_KEYS.LIBRE_URL);

        if (!url) {
            console.warn('LibreTranslate URL is not configured.');
            return response.sendStatus(400);
        }

        if (request.body.lang === 'zh-CN') {
            request.body.lang = 'zh';
        }

        if (request.body.lang === 'zh-TW') {
            request.body.lang = 'zt';
        }

        if (request.body.lang === 'pt-BR' || request.body.lang === 'pt-PT') {
            request.body.lang = 'pt';
        }

        const text = request.body.text;
        const lang = request.body.lang;

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const result = await fetch(url, {
            method: 'POST',
            body: JSON.stringify({
                q: text,
                source: 'auto',
                target: lang,
                format: 'text',
                api_key: key,
            }),
            headers: { 'Content-Type': 'application/json' },
        });

        if (!result.ok) {
            const error = await result.text();
            console.warn('LibreTranslate error: ', result.statusText, error);
            return response.sendStatus(500);
        }

        /** @type {any} */
        const json = await result.json();
        console.debug('Translated text: ' + json.translatedText);

        return response.send(json.translatedText);
    } catch (error) {
        console.error('Translation error: ' + error.message);
        return response.sendStatus(500);
    }
});
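
For reference, a minimal sketch of what a request to this endpoint looks like. The '/api/translate' mount prefix is an assumption; the mounting code is not part of this file.

// Hypothetical client call (mount prefix '/api/translate' is assumed).
const response = await fetch('/api/translate/libre', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ text: 'Hello, world!', lang: 'de' }),
});
const translated = await response.text(); // plain translated string on success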

router.post('/google', async (request, response) => {
    try {
        if (request.body.lang === 'pt-BR') {
            request.body.lang = 'pt';
        }

        const text = String(request.body.text ?? '');
        const lang = String(request.body.lang ?? '');

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const translator = new Translator({ to: lang });
        const translatedText = await translator.translate(text).then(result => result.text);

        response.setHeader('Content-Type', 'text/plain; charset=utf-8');
        console.debug('Translated text: ' + translatedText);
        return response.send(translatedText);
    } catch (error) {
        console.error('Translation error', error);
        return response.sendStatus(500);
    }
});

router.post('/yandex', async (request, response) => {
    try {
        if (request.body.lang === 'pt-PT') {
            request.body.lang = 'pt';
        }

        if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
            request.body.lang = 'zh';
        }

        const chunks = request.body.chunks;
        const lang = request.body.lang;

        if (!chunks || !lang) {
            return response.sendStatus(400);
        }

        // Reconstruct the original text for logging
        let inputText = '';

        const params = new URLSearchParams();
        for (const chunk of chunks) {
            params.append('text', chunk);
            inputText += chunk;
        }
        params.append('lang', lang);
        const ucid = uuidv4().replaceAll('-', '');

        console.debug('Input text: ' + inputText);

        const result = await fetch(`https://translate.yandex.net/api/v1/tr.json/translate?ucid=${ucid}&srv=android&format=text`, {
            method: 'POST',
            body: params,
            headers: {
                'Content-Type': 'application/x-www-form-urlencoded',
            },
        });

        if (!result.ok) {
            const error = await result.text();
            console.warn('Yandex error: ', result.statusText, error);
            return response.sendStatus(500);
        }

        /** @type {any} */
        const json = await result.json();
        // Join with no separator: the response mirrors the submitted chunks, so a bare join() would insert commas
        const translated = json.text.join('');
        console.debug('Translated text: ' + translated);

        return response.send(translated);
    } catch (error) {
        console.error('Translation error: ' + error.message);
        return response.sendStatus(500);
    }
});

router.post('/lingva', async (request, response) => {
    try {
        const secretUrl = readSecret(request.user.directories, SECRET_KEYS.LINGVA_URL);
        const baseUrl = secretUrl || LINGVA_DEFAULT;

        if (!secretUrl && baseUrl === LINGVA_DEFAULT) {
            console.warn('Lingva URL is using default value.', LINGVA_DEFAULT);
        }

        if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
            request.body.lang = 'zh';
        }

        if (request.body.lang === 'pt-BR' || request.body.lang === 'pt-PT') {
            request.body.lang = 'pt';
        }

        const text = request.body.text;
        const lang = request.body.lang;

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const url = urlJoin(baseUrl, 'auto', lang, encodeURIComponent(text));
        const result = await fetch(url);

        if (!result.ok) {
            const error = await result.text();
            console.warn('Lingva error: ', result.statusText, error);
            // Surface provider errors as a 500, like the other providers do
            return response.sendStatus(500);
        }

        /** @type {any} */
        const data = await result.json();
        console.debug('Translated text: ' + data.translation);
        return response.send(data.translation);
    } catch (error) {
        console.error('Translation error', error);
        return response.sendStatus(500);
    }
});

router.post('/deepl', async (request, response) => {
    try {
        const key = readSecret(request.user.directories, SECRET_KEYS.DEEPL);

        if (!key) {
            console.warn('DeepL key is not configured.');
            return response.sendStatus(400);
        }

        if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
            request.body.lang = 'ZH';
        }

        const text = request.body.text;
        const lang = request.body.lang;
        const formality = getConfigValue('deepl.formality', 'default');

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const params = new URLSearchParams();
        params.append('text', text);
        params.append('target_lang', lang);

        if (['de', 'fr', 'it', 'es', 'nl', 'ja', 'ru', 'pt-BR', 'pt-PT'].includes(lang)) {
            params.append('formality', formality);
        }

        const endpoint = request.body.endpoint === 'pro'
            ? 'https://api.deepl.com/v2/translate'
            : 'https://api-free.deepl.com/v2/translate';

        const result = await fetch(endpoint, {
            method: 'POST',
            body: params,
            headers: {
                'Accept': 'application/json',
                'Authorization': `DeepL-Auth-Key ${key}`,
                'Content-Type': 'application/x-www-form-urlencoded',
            },
        });

        if (!result.ok) {
            const error = await result.text();
            console.warn('DeepL error: ', result.statusText, error);
            return response.sendStatus(500);
        }

        /** @type {any} */
        const json = await result.json();
        console.debug('Translated text: ' + json.translations[0].text);

        return response.send(json.translations[0].text);
    } catch (error) {
        console.error('Translation error: ' + error.message);
        return response.sendStatus(500);
    }
});

router.post('/onering', async (request, response) => {
    try {
        const secretUrl = readSecret(request.user.directories, SECRET_KEYS.ONERING_URL);
        const url = secretUrl || ONERING_URL_DEFAULT;

        if (!url) {
            console.warn('OneRing URL is not configured.');
            return response.sendStatus(400);
        }

        if (!secretUrl && url === ONERING_URL_DEFAULT) {
            console.info('OneRing URL is using default value.', ONERING_URL_DEFAULT);
        }

        if (request.body.lang === 'pt-BR' || request.body.lang === 'pt-PT') {
            request.body.lang = 'pt';
        }

        const text = request.body.text;
        const from_lang = request.body.from_lang;
        const to_lang = request.body.to_lang;

        if (!text || !from_lang || !to_lang) {
            return response.sendStatus(400);
        }

        const params = new URLSearchParams();
        params.append('text', text);
        params.append('from_lang', from_lang);
        params.append('to_lang', to_lang);

        console.debug('Input text: ' + text);

        const fetchUrl = new URL(url);
        fetchUrl.search = params.toString();

        const result = await fetch(fetchUrl, {
            method: 'GET',
        });

        if (!result.ok) {
            const error = await result.text();
            console.warn('OneRing error: ', result.statusText, error);
            return response.sendStatus(500);
        }

        /** @type {any} */
        const data = await result.json();
        console.debug('Translated text: ' + data.result);

        return response.send(data.result);
    } catch (error) {
        console.error('Translation error: ' + error.message);
        return response.sendStatus(500);
    }
});

router.post('/deeplx', async (request, response) => {
    try {
        const secretUrl = readSecret(request.user.directories, SECRET_KEYS.DEEPLX_URL);
        const url = secretUrl || DEEPLX_URL_DEFAULT;

        if (!url) {
            console.warn('DeepLX URL is not configured.');
            return response.sendStatus(400);
        }

        if (!secretUrl && url === DEEPLX_URL_DEFAULT) {
            console.info('DeepLX URL is using default value.', DEEPLX_URL_DEFAULT);
        }

        const text = request.body.text;
        let lang = request.body.lang;
        if (request.body.lang === 'zh-CN' || request.body.lang === 'zh-TW') {
            lang = 'ZH';
        }

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const result = await fetch(url, {
            method: 'POST',
            body: JSON.stringify({
                text: text,
                source_lang: 'auto',
                target_lang: lang,
            }),
            headers: {
                'Accept': 'application/json',
                'Content-Type': 'application/json',
            },
        });

        if (!result.ok) {
            const error = await result.text();
            console.warn('DeepLX error: ', result.statusText, error);
            return response.sendStatus(500);
        }

        /** @type {any} */
        const json = await result.json();
        console.debug('Translated text: ' + json.data);

        return response.send(json.data);
    } catch (error) {
        console.error('DeepLX translation error: ' + error.message);
        return response.sendStatus(500);
    }
});

router.post('/bing', async (request, response) => {
    try {
        const text = request.body.text;
        let lang = request.body.lang;

        if (request.body.lang === 'zh-CN') {
            lang = 'zh-Hans';
        }

        if (request.body.lang === 'zh-TW') {
            lang = 'zh-Hant';
        }

        if (request.body.lang === 'pt-BR') {
            lang = 'pt';
        }

        if (!text || !lang) {
            return response.sendStatus(400);
        }

        console.debug('Input text: ' + text);

        const result = await bingTranslate(text, null, lang);
        const translatedText = result?.translation;
        console.debug('Translated text: ' + translatedText);
        return response.send(translatedText);
    } catch (error) {
        console.error('Translation error', error);
        return response.sendStatus(500);
    }
});
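
All of these handlers share one shape: normalize the language code, validate input, call the provider, and return plain text. A minimal sketch of how such a router is presumably wired into the Express app; the mount path and app setup are assumptions, since the server bootstrap is not part of this diff.

// Hypothetical server wiring (illustrative only — not part of this file).
import express from 'express';
import { router as translateRouter } from './endpoints/translate.js';

const app = express();
app.use(express.json());
app.use('/api/translate', translateRouter);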
251
web-app/src/endpoints/users-admin.js
Normal file
@@ -0,0 +1,251 @@
import { promises as fsPromises } from 'node:fs';

import storage from 'node-persist';
import express from 'express';
import lodash from 'lodash';
import { checkForNewContent, CONTENT_TYPES } from './content-manager.js';
import {
    KEY_PREFIX,
    toKey,
    requireAdminMiddleware,
    getUserAvatar,
    getAllUserHandles,
    getPasswordSalt,
    getPasswordHash,
    getUserDirectories,
    ensurePublicDirectoriesExist,
} from '../users.js';
import { DEFAULT_USER } from '../constants.js';

export const router = express.Router();

router.post('/get', requireAdminMiddleware, async (_request, response) => {
    try {
        /** @type {import('../users.js').User[]} */
        const users = await storage.values(x => x.key.startsWith(KEY_PREFIX));

        /** @type {Promise<import('../users.js').UserViewModel>[]} */
        const viewModelPromises = users
            .map(user => new Promise(resolve => {
                getUserAvatar(user.handle).then(avatar =>
                    resolve({
                        handle: user.handle,
                        name: user.name,
                        avatar: avatar,
                        admin: user.admin,
                        enabled: user.enabled,
                        created: user.created,
                        password: !!user.password,
                    }),
                );
            }));

        const viewModels = await Promise.all(viewModelPromises);
        viewModels.sort((x, y) => (x.created ?? 0) - (y.created ?? 0));
        return response.json(viewModels);
    } catch (error) {
        console.error('User list failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/disable', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Disable user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle === request.user.profile.handle) {
            console.warn('Disable user failed: Cannot disable yourself');
            return response.status(400).json({ error: 'Cannot disable yourself' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Disable user failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        user.enabled = false;
        await storage.setItem(toKey(request.body.handle), user);
        return response.sendStatus(204);
    } catch (error) {
        console.error('User disable failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/enable', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Enable user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Enable user failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        user.enabled = true;
        await storage.setItem(toKey(request.body.handle), user);
        return response.sendStatus(204);
    } catch (error) {
        console.error('User enable failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/promote', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Promote user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Promote user failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        user.admin = true;
        await storage.setItem(toKey(request.body.handle), user);
        return response.sendStatus(204);
    } catch (error) {
        console.error('User promote failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/demote', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Demote user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle === request.user.profile.handle) {
            console.warn('Demote user failed: Cannot demote yourself');
            return response.status(400).json({ error: 'Cannot demote yourself' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Demote user failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        user.admin = false;
        await storage.setItem(toKey(request.body.handle), user);
        return response.sendStatus(204);
    } catch (error) {
        console.error('User demote failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/create', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle || !request.body.name) {
            console.warn('Create user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        const handles = await getAllUserHandles();
        const handle = lodash.kebabCase(String(request.body.handle).toLowerCase().trim());

        if (!handle) {
            console.warn('Create user failed: Invalid handle');
            return response.status(400).json({ error: 'Invalid handle' });
        }

        if (handles.some(x => x === handle)) {
            console.warn('Create user failed: User with that handle already exists');
            return response.status(409).json({ error: 'User already exists' });
        }

        const salt = getPasswordSalt();
        const password = request.body.password ? getPasswordHash(request.body.password, salt) : '';

        const newUser = {
            handle: handle,
            name: request.body.name || 'Anonymous',
            created: Date.now(),
            password: password,
            salt: salt,
            admin: !!request.body.admin,
            enabled: true,
        };

        await storage.setItem(toKey(handle), newUser);

        // Create user directories
        console.info('Creating data directories for', newUser.handle);
        await ensurePublicDirectoriesExist();
        const directories = getUserDirectories(newUser.handle);
        await checkForNewContent([directories], [CONTENT_TYPES.SETTINGS]);
        return response.json({ handle: newUser.handle });
    } catch (error) {
        console.error('User create failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/delete', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Delete user failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle === request.user.profile.handle) {
            console.warn('Delete user failed: Cannot delete yourself');
            return response.status(400).json({ error: 'Cannot delete yourself' });
        }

        if (request.body.handle === DEFAULT_USER.handle) {
            console.warn('Delete user failed: Cannot delete default user');
            return response.status(400).json({ error: 'Sorry, but the default user cannot be deleted. It is required as a fallback.' });
        }

        await storage.removeItem(toKey(request.body.handle));

        if (request.body.purge) {
            const directories = getUserDirectories(request.body.handle);
            console.info('Deleting data directories for', request.body.handle);
            await fsPromises.rm(directories.root, { recursive: true, force: true });
        }

        return response.sendStatus(204);
    } catch (error) {
        console.error('User delete failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/slugify', requireAdminMiddleware, async (request, response) => {
    try {
        if (!request.body.text) {
            console.warn('Slugify failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        const text = lodash.kebabCase(String(request.body.text).toLowerCase().trim());

        return response.send(text);
    } catch (error) {
        console.error('Slugify failed:', error);
        return response.sendStatus(500);
    }
});
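
A hedged sketch of how an admin client would call the create endpoint above; the '/api/users' mount prefix is an assumption, since the mounting code is not part of this diff.

// Hypothetical admin call (illustrative only — not part of this file).
const created = await fetch('/api/users/create', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ handle: 'Jane Doe', name: 'Jane', password: 's3cret', admin: false }),
});
console.log(await created.json()); // { handle: 'jane-doe' } — kebab-cased by the endpoint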
255
web-app/src/endpoints/users-private.js
Normal file
@@ -0,0 +1,255 @@
import path from 'node:path';
import { promises as fsPromises } from 'node:fs';
import crypto from 'node:crypto';

import storage from 'node-persist';
import express from 'express';

import { getUserAvatar, toKey, getPasswordHash, getPasswordSalt, createBackupArchive, ensurePublicDirectoriesExist, toAvatarKey } from '../users.js';
import { SETTINGS_FILE } from '../constants.js';
import { checkForNewContent, CONTENT_TYPES } from './content-manager.js';
import { color, Cache } from '../util.js';

const RESET_CACHE = new Cache(5 * 60 * 1000);

export const router = express.Router();

router.post('/logout', async (request, response) => {
    try {
        if (!request.session) {
            console.error('Session not available');
            return response.sendStatus(500);
        }

        request.session.handle = null;
        request.session.csrfToken = null;
        request.session = null;
        return response.sendStatus(204);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.get('/me', async (request, response) => {
    try {
        if (!request.user) {
            return response.sendStatus(403);
        }

        const user = request.user.profile;
        const viewModel = {
            handle: user.handle,
            name: user.name,
            avatar: await getUserAvatar(user.handle),
            admin: user.admin,
            password: !!user.password,
            created: user.created,
        };

        return response.json(viewModel);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/change-avatar', async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Change avatar failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle !== request.user.profile.handle && !request.user.profile.admin) {
            console.error('Change avatar failed: Unauthorized');
            return response.status(403).json({ error: 'Unauthorized' });
        }

        // The avatar must be either a data URL or an empty string (which clears it)
        if (!request.body.avatar.startsWith('data:image/') && request.body.avatar !== '') {
            console.warn('Change avatar failed: Invalid data URL');
            return response.status(400).json({ error: 'Invalid data URL' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Change avatar failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        await storage.setItem(toAvatarKey(request.body.handle), request.body.avatar);

        return response.sendStatus(204);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/change-password', async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Change password failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle !== request.user.profile.handle && !request.user.profile.admin) {
            console.error('Change password failed: Unauthorized');
            return response.status(403).json({ error: 'Unauthorized' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Change password failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        if (!user.enabled) {
            console.error('Change password failed: User is disabled');
            return response.status(403).json({ error: 'User is disabled' });
        }

        if (!request.user.profile.admin && user.password && user.password !== getPasswordHash(request.body.oldPassword, user.salt)) {
            console.error('Change password failed: Incorrect password');
            return response.status(403).json({ error: 'Incorrect password' });
        }

        if (request.body.newPassword) {
            const salt = getPasswordSalt();
            user.password = getPasswordHash(request.body.newPassword, salt);
            user.salt = salt;
        } else {
            user.password = '';
            user.salt = '';
        }

        await storage.setItem(toKey(request.body.handle), user);
        return response.sendStatus(204);
    } catch (error) {
        console.error(error);
        return response.sendStatus(500);
    }
});

router.post('/backup', async (request, response) => {
    try {
        const handle = request.body.handle;

        if (!handle) {
            console.warn('Backup failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (handle !== request.user.profile.handle && !request.user.profile.admin) {
            console.error('Backup failed: Unauthorized');
            return response.status(403).json({ error: 'Unauthorized' });
        }

        await createBackupArchive(handle, response);
    } catch (error) {
        console.error('Backup failed', error);
        return response.sendStatus(500);
    }
});

router.post('/reset-settings', async (request, response) => {
    try {
        const password = request.body.password;

        if (request.user.profile.password && request.user.profile.password !== getPasswordHash(password, request.user.profile.salt)) {
            console.warn('Reset settings failed: Incorrect password');
            return response.status(403).json({ error: 'Incorrect password' });
        }

        const pathToFile = path.join(request.user.directories.root, SETTINGS_FILE);
        await fsPromises.rm(pathToFile, { force: true });
        await checkForNewContent([request.user.directories], [CONTENT_TYPES.SETTINGS]);

        return response.sendStatus(204);
    } catch (error) {
        console.error('Reset settings failed', error);
        return response.sendStatus(500);
    }
});

router.post('/change-name', async (request, response) => {
    try {
        if (!request.body.name || !request.body.handle) {
            console.warn('Change name failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.body.handle !== request.user.profile.handle && !request.user.profile.admin) {
            console.error('Change name failed: Unauthorized');
            return response.status(403).json({ error: 'Unauthorized' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.warn('Change name failed: User not found');
            return response.status(404).json({ error: 'User not found' });
        }

        user.name = request.body.name;
        await storage.setItem(toKey(request.body.handle), user);

        return response.sendStatus(204);
    } catch (error) {
        console.error('Change name failed', error);
        return response.sendStatus(500);
    }
});

router.post('/reset-step1', async (request, response) => {
    try {
        const resetCode = String(crypto.randomInt(1000, 9999));
        console.log();
        console.log(color.magenta(`${request.user.profile.name}, your account reset code is: `) + color.red(resetCode));
        console.log();
        RESET_CACHE.set(request.user.profile.handle, resetCode);
        return response.sendStatus(204);
    } catch (error) {
        console.error('Recover step 1 failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/reset-step2', async (request, response) => {
    try {
        if (!request.body.code) {
            console.warn('Recover step 2 failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        if (request.user.profile.password && request.user.profile.password !== getPasswordHash(request.body.password, request.user.profile.salt)) {
            console.warn('Recover step 2 failed: Incorrect password');
            return response.status(400).json({ error: 'Incorrect password' });
        }

        const code = RESET_CACHE.get(request.user.profile.handle);

        if (!code || code !== request.body.code) {
            console.warn('Recover step 2 failed: Incorrect code');
            return response.status(400).json({ error: 'Incorrect code' });
        }

        console.info('Resetting account data:', request.user.profile.handle);
        await fsPromises.rm(request.user.directories.root, { recursive: true, force: true });

        await ensurePublicDirectoriesExist();
        await checkForNewContent([request.user.directories]);

        RESET_CACHE.remove(request.user.profile.handle);
        return response.sendStatus(204);
    } catch (error) {
        console.error('Recover step 2 failed:', error);
        return response.sendStatus(500);
    }
});
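
The two-step reset above prints a 4-digit code to the server console and wipes the user's data directory once the code is confirmed. A hedged sketch of the client-side flow; the '/api/users' mount prefix is an assumption.

// Hypothetical two-step reset flow (illustrative only — not part of this file).
await fetch('/api/users/reset-step1', { method: 'POST' }); // code is printed to the server console
await fetch('/api/users/reset-step2', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code: '1234', password: '' }), // code read from the console
});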
199
web-app/src/endpoints/users-public.js
Normal file
@@ -0,0 +1,199 @@
import crypto from 'node:crypto';

import storage from 'node-persist';
import express from 'express';
import { RateLimiterMemory, RateLimiterRes } from 'rate-limiter-flexible';
import { getIpFromRequest, getRealIpFromHeader } from '../express-common.js';
import { color, Cache, getConfigValue } from '../util.js';
import { KEY_PREFIX, getUserAvatar, toKey, getPasswordHash, getPasswordSalt } from '../users.js';

const DISCREET_LOGIN = getConfigValue('enableDiscreetLogin', false, 'boolean');
const PREFER_REAL_IP_HEADER = getConfigValue('rateLimiting.preferRealIpHeader', false, 'boolean');
const MFA_CACHE = new Cache(5 * 60 * 1000);

const getIpAddress = (request) => PREFER_REAL_IP_HEADER ? getRealIpFromHeader(request) : getIpFromRequest(request);

export const router = express.Router();
const loginLimiter = new RateLimiterMemory({
    points: 5,
    duration: 60,
});
const recoverLimiter = new RateLimiterMemory({
    points: 5,
    duration: 300,
});

router.post('/list', async (_request, response) => {
    try {
        if (DISCREET_LOGIN) {
            return response.sendStatus(204);
        }

        /** @type {import('../users.js').User[]} */
        const users = await storage.values(x => x.key.startsWith(KEY_PREFIX));

        /** @type {Promise<import('../users.js').UserViewModel>[]} */
        const viewModelPromises = users
            .filter(x => x.enabled)
            .map(user => new Promise((resolve) => {
                getUserAvatar(user.handle).then(avatar =>
                    resolve({
                        handle: user.handle,
                        name: user.name,
                        created: user.created,
                        avatar: avatar,
                        password: !!user.password,
                    }),
                );
            }));

        const viewModels = await Promise.all(viewModelPromises);
        viewModels.sort((x, y) => (x.created ?? 0) - (y.created ?? 0));
        return response.json(viewModels);
    } catch (error) {
        console.error('User list failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/login', async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Login failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        const ip = getIpAddress(request);
        await loginLimiter.consume(ip);

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Login failed: User', request.body.handle, 'not found');
            return response.status(403).json({ error: 'Incorrect credentials' });
        }

        if (!user.enabled) {
            console.warn('Login failed: User', user.handle, 'is disabled');
            return response.status(403).json({ error: 'User is disabled' });
        }

        if (user.password && user.password !== getPasswordHash(request.body.password, user.salt)) {
            console.warn('Login failed: Incorrect password for', user.handle);
            return response.status(403).json({ error: 'Incorrect credentials' });
        }

        if (!request.session) {
            console.error('Session not available');
            return response.sendStatus(500);
        }

        await loginLimiter.delete(ip);
        request.session.handle = user.handle;
        console.info('Login successful:', user.handle, 'from', ip, 'at', new Date().toLocaleString());
        return response.json({ handle: user.handle });
    } catch (error) {
        if (error instanceof RateLimiterRes) {
            console.error('Login failed: Rate limited from', getIpAddress(request));
            return response.status(429).send({ error: 'Too many attempts. Try again later or recover your password.' });
        }

        console.error('Login failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/recover-step1', async (request, response) => {
    try {
        if (!request.body.handle) {
            console.warn('Recover step 1 failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        const ip = getIpAddress(request);
        await recoverLimiter.consume(ip);

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));

        if (!user) {
            console.error('Recover step 1 failed: User', request.body.handle, 'not found');
            return response.status(404).json({ error: 'User not found' });
        }

        if (!user.enabled) {
            console.error('Recover step 1 failed: User', user.handle, 'is disabled');
            return response.status(403).json({ error: 'User is disabled' });
        }

        const mfaCode = String(crypto.randomInt(1000, 9999));
        console.log();
        console.log(color.blue(`${user.name}, your password recovery code is: `) + color.magenta(mfaCode));
        console.log();
        MFA_CACHE.set(user.handle, mfaCode);
        return response.sendStatus(204);
    } catch (error) {
        if (error instanceof RateLimiterRes) {
            console.error('Recover step 1 failed: Rate limited from', getIpAddress(request));
            return response.status(429).send({ error: 'Too many attempts. Try again later or contact your admin.' });
        }

        console.error('Recover step 1 failed:', error);
        return response.sendStatus(500);
    }
});

router.post('/recover-step2', async (request, response) => {
    try {
        if (!request.body.handle || !request.body.code) {
            console.warn('Recover step 2 failed: Missing required fields');
            return response.status(400).json({ error: 'Missing required fields' });
        }

        /** @type {import('../users.js').User} */
        const user = await storage.getItem(toKey(request.body.handle));
        const ip = getIpAddress(request);

        if (!user) {
            console.error('Recover step 2 failed: User', request.body.handle, 'not found');
            return response.status(404).json({ error: 'User not found' });
        }

        if (!user.enabled) {
            console.warn('Recover step 2 failed: User', user.handle, 'is disabled');
            return response.status(403).json({ error: 'User is disabled' });
        }

        const mfaCode = MFA_CACHE.get(user.handle);

        if (request.body.code !== mfaCode) {
            await recoverLimiter.consume(ip);
            console.warn('Recover step 2 failed: Incorrect code');
            return response.status(403).json({ error: 'Incorrect code' });
        }

        if (request.body.newPassword) {
            const salt = getPasswordSalt();
            user.password = getPasswordHash(request.body.newPassword, salt);
            user.salt = salt;
            await storage.setItem(toKey(user.handle), user);
        } else {
            user.password = '';
            user.salt = '';
            await storage.setItem(toKey(user.handle), user);
        }

        await recoverLimiter.delete(ip);
        MFA_CACHE.remove(user.handle);
        return response.sendStatus(204);
    } catch (error) {
        if (error instanceof RateLimiterRes) {
            console.error('Recover step 2 failed: Rate limited from', getIpAddress(request));
            return response.status(429).send({ error: 'Too many attempts. Try again later or contact your admin.' });
        }

        console.error('Recover step 2 failed:', error);
        return response.sendStatus(500);
    }
});
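
These routes consume one rate-limiter point per attempt and clear the counter on success, which is what the RateLimiterRes branches in the catch blocks translate into HTTP 429. A small behavior sketch of the limiter configuration used above; the IP value is a placeholder.

// Behavior sketch (illustrative only — not part of this file): 5 points per 60 s per key.
import { RateLimiterMemory, RateLimiterRes } from 'rate-limiter-flexible';

const limiter = new RateLimiterMemory({ points: 5, duration: 60 });
try {
    await limiter.consume('203.0.113.7'); // hypothetical client IP
} catch (error) {
    if (error instanceof RateLimiterRes) {
        console.warn(`Blocked; retry in ${Math.ceil(error.msBeforeNext / 1000)} s`);
    }
}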
568
web-app/src/endpoints/vectors.js
Normal file
@@ -0,0 +1,568 @@
import path from 'node:path';
import fs from 'node:fs';

import vectra from 'vectra';
import express from 'express';
import sanitize from 'sanitize-filename';

import { getConfigValue } from '../util.js';

import { getNomicAIBatchVector, getNomicAIVector } from '../vectors/nomicai-vectors.js';
import { getOpenAIVector, getOpenAIBatchVector } from '../vectors/openai-vectors.js';
import { getTransformersVector, getTransformersBatchVector } from '../vectors/embedding.js';
import { getExtrasVector, getExtrasBatchVector } from '../vectors/extras-vectors.js';
import { getMakerSuiteVector, getMakerSuiteBatchVector, getVertexVector, getVertexBatchVector } from '../vectors/google-vectors.js';
import { getCohereVector, getCohereBatchVector } from '../vectors/cohere-vectors.js';
import { getLlamaCppVector, getLlamaCppBatchVector } from '../vectors/llamacpp-vectors.js';
import { getVllmVector, getVllmBatchVector } from '../vectors/vllm-vectors.js';
import { getOllamaVector, getOllamaBatchVector } from '../vectors/ollama-vectors.js';

// Don't forget to add new sources to the SOURCES array
const SOURCES = [
    'transformers',
    'mistral',
    'openai',
    'extras',
    'palm',
    'togetherai',
    'nomicai',
    'cohere',
    'ollama',
    'llamacpp',
    'vllm',
    'webllm',
    'koboldcpp',
    'vertexai',
    'electronhub',
    'openrouter',
    'chutes',
];

/**
 * Gets the vector for the given text from the given source.
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @param {string} text - The text to get the vector for
 * @param {boolean} isQuery - If the text is a query for embedding search
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
async function getVector(source, sourceSettings, text, isQuery, directories) {
    switch (source) {
        case 'nomicai':
            return getNomicAIVector(text, source, directories);
        case 'togetherai':
        case 'mistral':
        case 'openai':
        case 'electronhub':
        case 'openrouter':
        case 'chutes':
            return getOpenAIVector(text, source, directories, sourceSettings.model);
        case 'transformers':
            return getTransformersVector(text);
        case 'extras':
            return getExtrasVector(text, sourceSettings.extrasUrl, sourceSettings.extrasKey);
        case 'palm':
            return getMakerSuiteVector(text, sourceSettings.model, sourceSettings.request);
        case 'vertexai':
            return getVertexVector(text, sourceSettings.model, sourceSettings.request);
        case 'cohere':
            return getCohereVector(text, isQuery, directories, sourceSettings.model);
        case 'llamacpp':
            return getLlamaCppVector(text, sourceSettings.apiUrl, directories);
        case 'vllm':
            return getVllmVector(text, sourceSettings.apiUrl, sourceSettings.model, directories);
        case 'ollama':
            return getOllamaVector(text, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories);
        case 'webllm':
        case 'koboldcpp':
            // Embeddings are precomputed by the client and passed in the request body
            return sourceSettings.embeddings[text];
    }

    throw new Error(`Unknown vector source ${source}`);
}

/**
 * Gets the vector for the given text batch from the given source.
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @param {string[]} texts - The array of texts to get the vector for
 * @param {boolean} isQuery - If the text is a query for embedding search
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
async function getBatchVector(source, sourceSettings, texts, isQuery, directories) {
    const batchSize = 10;
    const batches = Array(Math.ceil(texts.length / batchSize)).fill(undefined).map((_, i) => texts.slice(i * batchSize, i * batchSize + batchSize));

    let results = [];
    for (let batch of batches) {
        switch (source) {
            case 'nomicai':
                results.push(...await getNomicAIBatchVector(batch, source, directories));
                break;
            case 'togetherai':
            case 'mistral':
            case 'openai':
            case 'electronhub':
            case 'openrouter':
            case 'chutes':
                results.push(...await getOpenAIBatchVector(batch, source, directories, sourceSettings.model));
                break;
            case 'transformers':
                results.push(...await getTransformersBatchVector(batch));
                break;
            case 'extras':
                results.push(...await getExtrasBatchVector(batch, sourceSettings.extrasUrl, sourceSettings.extrasKey));
                break;
            case 'palm':
                results.push(...await getMakerSuiteBatchVector(batch, sourceSettings.model, sourceSettings.request));
                break;
            case 'vertexai':
                results.push(...await getVertexBatchVector(batch, sourceSettings.model, sourceSettings.request));
                break;
            case 'cohere':
                results.push(...await getCohereBatchVector(batch, isQuery, directories, sourceSettings.model));
                break;
            case 'llamacpp':
                results.push(...await getLlamaCppBatchVector(batch, sourceSettings.apiUrl, directories));
                break;
            case 'vllm':
                results.push(...await getVllmBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, directories));
                break;
            case 'ollama':
                results.push(...await getOllamaBatchVector(batch, sourceSettings.apiUrl, sourceSettings.model, sourceSettings.keep, directories));
                break;
            case 'webllm':
            case 'koboldcpp':
                // Look up only the current batch; pushing all texts here would duplicate results across batches
                results.push(...batch.map(x => sourceSettings.embeddings[x]));
                break;
            default:
                throw new Error(`Unknown vector source ${source}`);
        }
    }

    return results;
}
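
The slicing expression above splits the inputs into fixed-size groups. A quick worked check of that arithmetic:

// Worked example of the batch slicing above with batchSize = 10 (illustrative only).
const exampleTexts = Array.from({ length: 23 }, (_, i) => `chunk ${i}`);
const exampleBatches = Array(Math.ceil(exampleTexts.length / 10)) // ceil(23 / 10) = 3 batches
    .fill(undefined)
    .map((_, i) => exampleTexts.slice(i * 10, i * 10 + 10));
console.log(exampleBatches.map(b => b.length)); // [10, 10, 3]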

/**
 * Extracts settings for the vectorization sources from the HTTP request body.
 * @param {string} source - Which source to extract settings for.
 * @param {object} request - The HTTP request object.
 * @returns {object} - An object that can be used as `sourceSettings` in functions that take that parameter.
 */
function getSourceSettings(source, request) {
    switch (source) {
        case 'togetherai':
        case 'openai':
        case 'cohere':
            return {
                model: String(request.body.model),
            };
        case 'electronhub':
            return {
                model: String(request.body.model || 'text-embedding-3-small'),
            };
        case 'openrouter':
            return {
                // Apply the fallback before stringifying: String(undefined) would yield the truthy string 'undefined'
                model: String(request.body.model || 'openai/text-embedding-3-large'),
            };
        case 'llamacpp':
            return {
                apiUrl: String(request.body.apiUrl),
            };
        case 'vllm':
            return {
                apiUrl: String(request.body.apiUrl),
                model: String(request.body.model),
            };
        case 'ollama':
            return {
                apiUrl: String(request.body.apiUrl),
                model: String(request.body.model),
                keep: Boolean(request.body.keep),
            };
        case 'extras':
            return {
                extrasUrl: String(request.body.extrasUrl),
                extrasKey: String(request.body.extrasKey),
            };
        case 'transformers':
            return {
                model: getConfigValue('extensions.models.embedding', ''),
            };
        case 'palm':
        case 'vertexai':
            return {
                model: String(request.body.model || 'text-embedding-005'),
                request: request, // Pass the request object to get API key and URL
            };
        case 'mistral':
            return {
                model: 'mistral-embed',
            };
        case 'nomicai':
            return {
                model: 'nomic-embed-text-v1.5',
            };
        case 'webllm':
        case 'koboldcpp':
            return {
                model: String(request.body.model),
                embeddings: request.body.embeddings ?? {},
            };
        case 'chutes':
            return {
                model: String(request.body.model || 'chutes-qwen-qwen3-embedding-8b'),
            };
        default:
            return {};
    }
}
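
Illustrative only: what the extraction yields for a hypothetical request body (the values are placeholders, not defaults of the library).

// Example call to the function above (not part of this file).
const fakeRequest = { body: { model: 'nomic-embed-text', apiUrl: 'http://127.0.0.1:11434', keep: true } };
console.log(getSourceSettings('ollama', fakeRequest));
// { apiUrl: 'http://127.0.0.1:11434', model: 'nomic-embed-text', keep: true }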

/**
 * Gets the model scope for the source.
 * @param {object} sourceSettings - The settings for the source
 * @returns {string} The model scope for the source
 */
function getModelScope(sourceSettings) {
    return (sourceSettings?.model || '');
}

/**
 * Gets the index for the vector collection
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {object} sourceSettings - Settings for the source
 * @returns {Promise<vectra.LocalIndex>} - The index for the collection
 */
async function getIndex(directories, collectionId, source, sourceSettings) {
    const model = getModelScope(sourceSettings);
    const pathToFile = path.join(directories.vectors, sanitize(source), sanitize(collectionId), sanitize(model));
    const store = new vectra.LocalIndex(pathToFile);

    if (!await store.isIndexCreated()) {
        await store.createIndex();
    }

    return store;
}

/**
 * Inserts items into the vector collection
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @param {{ hash: number; text: string; index: number; }[]} items - The items to insert
 */
async function insertVectorItems(directories, collectionId, source, sourceSettings, items) {
    const store = await getIndex(directories, collectionId, source, sourceSettings);

    await store.beginUpdate();

    const vectors = await getBatchVector(source, sourceSettings, items.map(x => x.text), false, directories);

    for (let i = 0; i < items.length; i++) {
        const item = items[i];
        const vector = vectors[i];
        await store.upsertItem({ vector: vector, metadata: { hash: item.hash, text: item.text, index: item.index } });
    }

    await store.endUpdate();
}
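
A hedged usage sketch: inserting two pre-hashed chunks into a per-chat collection. The collection ID and hash values are placeholders; the call would sit inside a route handler where `request.user.directories` is available.

// Illustrative call (not part of this file), e.g. inside a route handler.
await insertVectorItems(request.user.directories, 'chat-abc123', 'transformers', {}, [
    { hash: 101, text: 'The quick brown fox.', index: 0 },
    { hash: 102, text: 'Jumps over the lazy dog.', index: 1 },
]);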

/**
 * Gets the hashes of the items in the vector collection
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @returns {Promise<number[]>} - The hashes of the items in the collection
 */
async function getSavedHashes(directories, collectionId, source, sourceSettings) {
    const store = await getIndex(directories, collectionId, source, sourceSettings);

    const items = await store.listItems();
    const hashes = items.map(x => Number(x.metadata.hash));

    return hashes;
}

/**
 * Deletes items from the vector collection by hash
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @param {number[]} hashes - The hashes of the items to delete
 */
async function deleteVectorItems(directories, collectionId, source, sourceSettings, hashes) {
    const store = await getIndex(directories, collectionId, source, sourceSettings);
    const items = await store.listItemsByMetadata({ hash: { '$in': hashes } });

    await store.beginUpdate();

    for (const item of items) {
        await store.deleteItem(item.id);
    }

    await store.endUpdate();
}

/**
 * Gets the hashes of the items in the vector collection that match the search text
 * @param {import('../users.js').UserDirectoryList} directories - User directories
 * @param {string} collectionId - The collection ID
 * @param {string} source - The source of the vector
 * @param {Object} sourceSettings - Settings for the source, if it needs any
 * @param {string} searchText - The text to search for
 * @param {number} topK - The number of results to return
 * @param {number} threshold - The threshold for the search
 * @returns {Promise<{hashes: number[], metadata: object[]}>} - The hashes and metadata of the matching items; metadata is threshold-filtered, hashes cover all top-K results
 */
async function queryCollection(directories, collectionId, source, sourceSettings, searchText, topK, threshold) {
    const store = await getIndex(directories, collectionId, source, sourceSettings);
    const vector = await getVector(source, sourceSettings, searchText, true, directories);

    const result = await store.queryItems(vector, topK);
    const metadata = result.filter(x => x.score >= threshold).map(x => x.item.metadata);
    const hashes = result.map(x => Number(x.item.metadata.hash));
    return { metadata, hashes };
}
|
||||
|
||||
/**
|
||||
* Queries multiple collections for the given search queries. Returns the overall top K results.
|
||||
* @param {import('../users.js').UserDirectoryList} directories - User directories
|
||||
* @param {string[]} collectionIds - The collection IDs to query
|
||||
* @param {string} source - The source of the vector
|
||||
* @param {Object} sourceSettings - Settings for the source, if it needs any
|
||||
* @param {string} searchText - The text to search for
|
||||
* @param {number} topK - The number of results to return
|
||||
* @param {number} threshold - The threshold for the search
|
||||
*
|
||||
* @returns {Promise<Record<string, { hashes: number[], metadata: object[] }>>} - The top K results from each collection
|
||||
*/
|
||||
async function multiQueryCollection(directories, collectionIds, source, sourceSettings, searchText, topK, threshold) {
|
||||
const vector = await getVector(source, sourceSettings, searchText, true, directories);
|
||||
const results = [];
|
||||
|
||||
for (const collectionId of collectionIds) {
|
||||
const store = await getIndex(directories, collectionId, source, sourceSettings);
|
||||
const result = await store.queryItems(vector, topK);
|
||||
results.push(...result.map(result => ({ collectionId, result })));
|
||||
}
|
||||
|
||||
// Sort results by descending similarity, apply threshold, and take top K
|
||||
const sortedResults = results
|
||||
.sort((a, b) => b.result.score - a.result.score)
|
||||
.filter(x => x.result.score >= threshold)
|
||||
.slice(0, topK);
|
||||
|
||||
/**
|
||||
* Group the results by collection ID
|
||||
* @type {Record<string, { hashes: number[], metadata: object[] }>}
|
||||
*/
|
||||
const groupedResults = {};
|
||||
for (const result of sortedResults) {
|
||||
if (!groupedResults[result.collectionId]) {
|
||||
groupedResults[result.collectionId] = { hashes: [], metadata: [] };
|
||||
}
|
||||
|
||||
groupedResults[result.collectionId].hashes.push(Number(result.result.item.metadata.hash));
|
||||
groupedResults[result.collectionId].metadata.push(result.result.item.metadata);
|
||||
}
|
||||
|
||||
return groupedResults;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a request to regenerate the index if it is corrupted.
|
||||
* @param {import('express').Request} req Express request object
|
||||
* @param {import('express').Response} res Express response object
|
||||
* @param {Error} error Error object
|
||||
* @returns {Promise<any>} Promise
|
||||
*/
|
||||
async function regenerateCorruptedIndexErrorHandler(req, res, error) {
|
||||
if (error instanceof SyntaxError && !req.query.regenerated) {
|
||||
const collectionId = String(req.body.collectionId);
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
if (collectionId && source) {
|
||||
const index = await getIndex(req.user.directories, collectionId, source, sourceSettings);
|
||||
const exists = await index.isIndexCreated();
|
||||
|
||||
if (exists) {
|
||||
const path = index.folderPath;
|
||||
console.warn(`Corrupted index detected at ${path}, regenerating...`);
|
||||
await index.deleteIndex();
|
||||
return res.redirect(307, req.originalUrl + '?regenerated=true');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.error(error);
|
||||
return res.sendStatus(500);
|
||||
}
|
||||
|
||||
export const router = express.Router();
|
||||
|
||||
router.post('/query', async (req, res) => {
|
||||
try {
|
||||
if (!req.body.collectionId || !req.body.searchText) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionId = String(req.body.collectionId);
|
||||
const searchText = String(req.body.searchText);
|
||||
const topK = Number(req.body.topK) || 10;
|
||||
const threshold = Number(req.body.threshold) || 0.0;
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
const results = await queryCollection(req.user.directories, collectionId, source, sourceSettings, searchText, topK, threshold);
|
||||
return res.json(results);
|
||||
} catch (error) {
|
||||
return regenerateCorruptedIndexErrorHandler(req, res, error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/query-multi', async (req, res) => {
|
||||
try {
|
||||
if (!Array.isArray(req.body.collectionIds) || !req.body.searchText) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionIds = req.body.collectionIds.map(x => String(x));
|
||||
const searchText = String(req.body.searchText);
|
||||
const topK = Number(req.body.topK) || 10;
|
||||
const threshold = Number(req.body.threshold) || 0.0;
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
const results = await multiQueryCollection(req.user.directories, collectionIds, source, sourceSettings, searchText, topK, threshold);
|
||||
return res.json(results);
|
||||
} catch (error) {
|
||||
return regenerateCorruptedIndexErrorHandler(req, res, error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/insert', async (req, res) => {
|
||||
try {
|
||||
if (!Array.isArray(req.body.items) || !req.body.collectionId) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionId = String(req.body.collectionId);
|
||||
const items = req.body.items.map(x => ({ hash: x.hash, text: x.text, index: x.index }));
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
await insertVectorItems(req.user.directories, collectionId, source, sourceSettings, items);
|
||||
return res.sendStatus(200);
|
||||
} catch (error) {
|
||||
return regenerateCorruptedIndexErrorHandler(req, res, error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/list', async (req, res) => {
|
||||
try {
|
||||
if (!req.body.collectionId) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionId = String(req.body.collectionId);
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
const hashes = await getSavedHashes(req.user.directories, collectionId, source, sourceSettings);
|
||||
return res.json(hashes);
|
||||
} catch (error) {
|
||||
return regenerateCorruptedIndexErrorHandler(req, res, error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/delete', async (req, res) => {
|
||||
try {
|
||||
if (!Array.isArray(req.body.hashes) || !req.body.collectionId) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionId = String(req.body.collectionId);
|
||||
const hashes = req.body.hashes.map(x => Number(x));
|
||||
const source = String(req.body.source) || 'transformers';
|
||||
const sourceSettings = getSourceSettings(source, req);
|
||||
|
||||
await deleteVectorItems(req.user.directories, collectionId, source, sourceSettings, hashes);
|
||||
return res.sendStatus(200);
|
||||
} catch (error) {
|
||||
return regenerateCorruptedIndexErrorHandler(req, res, error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/purge-all', async (req, res) => {
|
||||
try {
|
||||
for (const source of SOURCES) {
|
||||
const sourcePath = path.join(req.user.directories.vectors, sanitize(source));
|
||||
if (!fs.existsSync(sourcePath)) {
|
||||
continue;
|
||||
}
|
||||
await fs.promises.rm(sourcePath, { recursive: true });
|
||||
console.info(`Deleted vector source store at ${sourcePath}`);
|
||||
}
|
||||
|
||||
return res.sendStatus(200);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return res.sendStatus(500);
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/purge', async (req, res) => {
|
||||
try {
|
||||
if (!req.body.collectionId) {
|
||||
return res.sendStatus(400);
|
||||
}
|
||||
|
||||
const collectionId = String(req.body.collectionId);
|
||||
|
||||
for (const source of SOURCES) {
|
||||
const sourcePath = path.join(req.user.directories.vectors, sanitize(source), sanitize(collectionId));
|
||||
if (!fs.existsSync(sourcePath)) {
|
||||
continue;
|
||||
}
|
||||
await fs.promises.rm(sourcePath, { recursive: true });
|
||||
console.info(`Deleted vector index at ${sourcePath}`);
|
||||
}
|
||||
|
||||
return res.sendStatus(200);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
return res.sendStatus(500);
|
||||
}
|
||||
});
|
||||
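A minimal client-side sketch of calling the /query route above; the /api/vector mount prefix is an assumption that depends on where this router is attached:

// Hypothetical client call, assuming the router is mounted under /api/vector.
const response = await fetch('/api/vector/query', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        collectionId: 'my-collection',
        searchText: 'what was said about dragons',
        topK: 5,
        threshold: 0.25,
        source: 'transformers',
    }),
});
const { hashes, metadata } = await response.json();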
157
web-app/src/endpoints/worldinfo.js
Normal file
@@ -0,0 +1,157 @@
import fs from 'node:fs';
import path from 'node:path';

import express from 'express';
import sanitize from 'sanitize-filename';
import _ from 'lodash';
import { sync as writeFileAtomicSync } from 'write-file-atomic';
import { tryParse } from '../util.js';

/**
 * Reads a World Info file and returns its contents
 * @param {import('../users.js').UserDirectoryList} directories User directories
 * @param {string} worldInfoName Name of the World Info file
 * @param {boolean} allowDummy If true, returns an empty object if the file doesn't exist
 * @returns {object} World Info file contents
 */
export function readWorldInfoFile(directories, worldInfoName, allowDummy) {
    const dummyObject = allowDummy ? { entries: {} } : null;

    if (!worldInfoName) {
        return dummyObject;
    }

    const filename = sanitize(`${worldInfoName}.json`);
    const pathToWorldInfo = path.join(directories.worlds, filename);

    if (!fs.existsSync(pathToWorldInfo)) {
        console.error(`World info file ${filename} doesn't exist.`);
        return dummyObject;
    }

    const worldInfoText = fs.readFileSync(pathToWorldInfo, 'utf8');
    const worldInfo = JSON.parse(worldInfoText);
    return worldInfo;
}

export const router = express.Router();

router.post('/list', async (request, response) => {
    try {
        const data = [];
        const jsonFiles = (await fs.promises.readdir(request.user.directories.worlds, { withFileTypes: true }))
            .filter((file) => file.isFile() && path.extname(file.name).toLowerCase() === '.json')
            .sort((a, b) => a.name.localeCompare(b.name));

        for (const file of jsonFiles) {
            try {
                const filePath = path.join(request.user.directories.worlds, file.name);
                const fileContents = await fs.promises.readFile(filePath, 'utf8');
                const fileContentsParsed = tryParse(fileContents) || {};
                const fileExtensions = fileContentsParsed?.extensions || {};
                const fileNameWithoutExt = path.parse(file.name).name;
                const fileData = {
                    file_id: fileNameWithoutExt,
                    name: fileContentsParsed?.name || fileNameWithoutExt,
                    extensions: _.isObjectLike(fileExtensions) ? fileExtensions : {},
                };
                data.push(fileData);
            } catch (err) {
                console.warn(`Error reading or parsing World Info file ${file.name}:`, err);
            }
        }

        return response.send(data);
    } catch (err) {
        console.error('Error reading World Info directory:', err);
        return response.sendStatus(500);
    }
});

router.post('/get', (request, response) => {
    if (!request.body?.name) {
        return response.sendStatus(400);
    }

    const file = readWorldInfoFile(request.user.directories, request.body.name, true);

    return response.send(file);
});

router.post('/delete', (request, response) => {
    if (!request.body?.name) {
        return response.sendStatus(400);
    }

    const worldInfoName = request.body.name;
    const filename = sanitize(`${worldInfoName}.json`);
    const pathToWorldInfo = path.join(request.user.directories.worlds, filename);

    if (!fs.existsSync(pathToWorldInfo)) {
        throw new Error(`World info file ${filename} doesn't exist.`);
    }

    fs.unlinkSync(pathToWorldInfo);

    return response.sendStatus(200);
});

router.post('/import', (request, response) => {
    if (!request.file) return response.sendStatus(400);

    const filename = `${path.parse(sanitize(request.file.originalname)).name}.json`;

    let fileContents = null;

    if (request.body.convertedData) {
        fileContents = request.body.convertedData;
    } else {
        const pathToUpload = path.join(request.file.destination, request.file.filename);
        fileContents = fs.readFileSync(pathToUpload, 'utf8');
        fs.unlinkSync(pathToUpload);
    }

    try {
        const worldContent = JSON.parse(fileContents);
        if (!('entries' in worldContent)) {
            throw new Error('File must contain a world info entries list');
        }
    } catch (err) {
        return response.status(400).send('Not a valid world info file');
    }

    const pathToNewFile = path.join(request.user.directories.worlds, filename);
    const worldName = path.parse(pathToNewFile).name;

    if (!worldName) {
        return response.status(400).send('World file must have a name');
    }

    writeFileAtomicSync(pathToNewFile, fileContents);
    return response.send({ name: worldName });
});

router.post('/edit', (request, response) => {
    if (!request.body) {
        return response.sendStatus(400);
    }

    if (!request.body.name) {
        return response.status(400).send('World file must have a name');
    }

    try {
        if (!('entries' in request.body.data)) {
            throw new Error('World info must contain an entries list');
        }
    } catch (err) {
        return response.status(400).send('Not a valid world info file');
    }

    const filename = sanitize(`${request.body.name}.json`);
    const pathToFile = path.join(request.user.directories.worlds, filename);

    writeFileAtomicSync(pathToFile, JSON.stringify(request.body.data, null, 4));

    return response.send({ ok: true });
});
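For reference, the only structural requirement the /import and /edit validation above enforces is a top-level entries key; a minimal sketch (the fields inside an entry are assumptions about the client-side format, not checked by this endpoint):

// Minimal object that passes the 'entries' check above.
const minimalWorldInfo = {
    entries: {
        0: { key: ['dragon'], content: 'Dragons are extinct in this setting.' }, // hypothetical entry shape
    },
};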
52
web-app/src/express-common.js
Normal file
@@ -0,0 +1,52 @@
import ipaddr from 'ipaddr.js';

const noopMiddleware = (_req, _res, next) => next();
/** @deprecated Do not use. A global middleware is provided at the application level. */
export const jsonParser = noopMiddleware;
/** @deprecated Do not use. A global middleware is provided at the application level. */
export const urlencodedParser = noopMiddleware;

/**
 * Gets the IP address of the client from the request object.
 * @param {import('express').Request} req Request object
 * @returns {string} IP address of the client
 */
export function getIpFromRequest(req) {
    let clientIp = req.socket.remoteAddress;
    if (!clientIp) {
        return 'unknown';
    }
    let ip = ipaddr.parse(clientIp);
    // Check if the IP address is an IPv4-mapped IPv6 address
    if (ip.kind() === 'ipv6' && ip instanceof ipaddr.IPv6 && ip.isIPv4MappedAddress()) {
        const ipv4 = ip.toIPv4Address().toString();
        clientIp = ipv4;
    } else {
        clientIp = ip.toString();
    }
    return clientIp;
}

/**
 * Gets the IP address of the client from the x-real-ip header when behind a reverse proxy; falls back to the socket remote address.
 * This function should be used when the application is running behind a reverse proxy (e.g. Nginx, Traefik, Caddy...).
 * @param {import('express').Request} req Request object
 * @returns {string} IP address of the client
 */
export function getRealIpFromHeader(req) {
    if (req.headers['x-real-ip']) {
        return req.headers['x-real-ip'].toString();
    }

    return getIpFromRequest(req);
}

/**
 * Checks if the request is coming from a Firefox browser.
 * @param {import('express').Request} req Request object
 * @returns {boolean} True if the request is from Firefox, false otherwise.
 */
export function isFirefox(req) {
    const userAgent = req.headers['user-agent'] || '';
    return /firefox/i.test(userAgent);
}
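A standalone sketch of the IPv4-mapped address normalization performed by getIpFromRequest, using the same ipaddr.js calls:

import ipaddr from 'ipaddr.js';

// An IPv4 client on a dual-stack socket typically appears as '::ffff:a.b.c.d'.
const ip = ipaddr.parse('::ffff:127.0.0.1');
if (ip.kind() === 'ipv6' && ip instanceof ipaddr.IPv6 && ip.isIPv4MappedAddress()) {
    console.log(ip.toIPv4Address().toString()); // '127.0.0.1'
}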
40
web-app/src/fetch-patch.js
Normal file
@@ -0,0 +1,40 @@
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import mime from 'mime-types';
import { serverDirectory } from './server-directory.js';
import { getRequestURL, isFileURL, isPathUnderParent } from './util.js';

const originalFetch = globalThis.fetch;

const ALLOWED_EXTENSIONS = [
    '.wasm',
];

// Patched fetch function that handles file URLs
globalThis.fetch = async (/** @type {string | URL | Request} */ request, /** @type {RequestInit | undefined} */ options) => {
    if (!isFileURL(request)) {
        return originalFetch(request, options);
    }
    const url = getRequestURL(request);
    const filePath = path.resolve(fileURLToPath(url));
    const isUnderServerDirectory = isPathUnderParent(serverDirectory, filePath);
    if (!isUnderServerDirectory) {
        throw new Error('Requested file path is outside of the server directory.');
    }
    const parsedPath = path.parse(filePath);
    if (!ALLOWED_EXTENSIONS.includes(parsedPath.ext)) {
        throw new Error('Unsupported file extension.');
    }
    const fileName = parsedPath.base;
    const buffer = await fs.promises.readFile(filePath);
    const response = new Response(buffer, {
        status: 200,
        statusText: 'OK',
        headers: {
            'Content-Type': mime.lookup(fileName) || 'application/octet-stream',
            'Content-Length': buffer.length.toString(),
        },
    });
    return response;
};
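With the patch above applied, server code can fetch allowed files by file: URL; a sketch with a hypothetical .wasm path:

import url from 'node:url';
import path from 'node:path';
import { serverDirectory } from './server-directory.js';

// Hypothetical .wasm file somewhere under the server directory.
const wasmPath = path.join(serverDirectory, 'dist', 'example.wasm');
const response = await fetch(url.pathToFileURL(wasmPath));
const bytes = await response.arrayBuffer(); // handled by the patched fetch, not the network stack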
63
web-app/src/jimp.js
Normal file
@@ -0,0 +1,63 @@
import { createJimp } from '@jimp/core';

// Optimized image formats
import webp from '@jimp/wasm-webp';
import png from '@jimp/wasm-png';
import jpeg from '@jimp/wasm-jpeg';
import avif from '@jimp/wasm-avif';

// Other image formats
import bmp, { msBmp } from '@jimp/js-bmp';
import gif from '@jimp/js-gif';
import tiff from '@jimp/js-tiff';

// Plugins
import * as blit from '@jimp/plugin-blit';
import * as circle from '@jimp/plugin-circle';
import * as color from '@jimp/plugin-color';
import * as contain from '@jimp/plugin-contain';
import * as cover from '@jimp/plugin-cover';
import * as crop from '@jimp/plugin-crop';
import * as displace from '@jimp/plugin-displace';
import * as fisheye from '@jimp/plugin-fisheye';
import * as flip from '@jimp/plugin-flip';
import * as mask from '@jimp/plugin-mask';
import * as resize from '@jimp/plugin-resize';
import * as rotate from '@jimp/plugin-rotate';
import * as threshold from '@jimp/plugin-threshold';
import * as quantize from '@jimp/plugin-quantize';

const defaultPlugins = [
    blit.methods,
    circle.methods,
    color.methods,
    contain.methods,
    cover.methods,
    crop.methods,
    displace.methods,
    fisheye.methods,
    flip.methods,
    mask.methods,
    resize.methods,
    rotate.methods,
    threshold.methods,
    quantize.methods,
];

// A custom jimp that uses WASM for optimized formats and JS for the rest
const Jimp = createJimp({
    formats: [webp, png, jpeg, avif, bmp, msBmp, gif, tiff],
    plugins: [...defaultPlugins],
});

const JimpMime = {
    bmp: bmp().mime,
    gif: gif().mime,
    jpeg: jpeg().mime,
    png: png().mime,
    tiff: tiff().mime,
};

export default Jimp;

export { Jimp, JimpMime };
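A usage sketch for the custom build above, assuming the Jimp v1 options-object API:

import { Jimp, JimpMime } from './jimp.js';

const image = await Jimp.read('avatar.webp'); // decoded via the WASM WebP codec
image.resize({ w: 256, h: 256 });             // provided by @jimp/plugin-resize
const pngBuffer = await image.getBuffer(JimpMime.png);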
59
web-app/src/middleware/accessLogWriter.js
Normal file
@@ -0,0 +1,59 @@
import path from 'node:path';
import fs from 'node:fs';
import { getRealIpFromHeader } from '../express-common.js';
import { color, getConfigValue } from '../util.js';

const enableAccessLog = getConfigValue('logging.enableAccessLog', true, 'boolean');

const knownIPs = new Set();

export const getAccessLogPath = () => path.join(globalThis.DATA_ROOT, 'access.log');

export function migrateAccessLog() {
    try {
        if (!fs.existsSync('access.log')) {
            return;
        }
        const logPath = getAccessLogPath();
        if (fs.existsSync(logPath)) {
            return;
        }
        fs.renameSync('access.log', logPath);
        console.log(color.yellow('Migrated access.log to new location:'), logPath);
    } catch (e) {
        console.error('Failed to migrate access log:', e);
        console.info('Please move access.log to the data directory manually.');
    }
}

/**
 * Creates middleware for logging access and new connections
 * @returns {import('express').RequestHandler}
 */
export default function accessLoggerMiddleware() {
    return function (req, res, next) {
        const clientIp = getRealIpFromHeader(req);
        const userAgent = req.headers['user-agent'];

        if (!knownIPs.has(clientIp)) {
            // Log new connection
            knownIPs.add(clientIp);

            // Write to access log if enabled
            if (enableAccessLog) {
                console.info(color.yellow(`New connection from ${clientIp}; User Agent: ${userAgent}\n`));
                const logPath = getAccessLogPath();
                const timestamp = new Date().toISOString();
                const log = `${timestamp} ${clientIp} ${userAgent}\n`;

                fs.appendFile(logPath, log, (err) => {
                    if (err) {
                        console.error('Failed to write access log:', err);
                    }
                });
            }
        }

        next();
    };
}
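A wiring sketch for the logger above; the app instance and startup order are assumptions:

import accessLoggerMiddleware, { migrateAccessLog } from './middleware/accessLogWriter.js';

migrateAccessLog();                // move a legacy root-level access.log into the data root once
app.use(accessLoggerMiddleware()); // each distinct client IP is logged on first contact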
55
web-app/src/middleware/basicAuth.js
Normal file
@@ -0,0 +1,55 @@
/**
 * When applied, this middleware will ensure the request contains the required header for basic authentication and only
 * allow access to the endpoint after successful authentication.
 */
import { Buffer } from 'node:buffer';
import storage from 'node-persist';
import { getAllUserHandles, toKey, getPasswordHash } from '../users.js';
import { getConfigValue, safeReadFileSync } from '../util.js';

const PER_USER_BASIC_AUTH = getConfigValue('perUserBasicAuth', false, 'boolean');
const ENABLE_ACCOUNTS = getConfigValue('enableUserAccounts', false, 'boolean');

const basicAuthMiddleware = async function (request, response, callback) {
    const unauthorizedWebpage = safeReadFileSync('./public/error/unauthorized.html') ?? '';
    const unauthorizedResponse = (res) => {
        res.set('WWW-Authenticate', 'Basic realm="SillyTavern", charset="UTF-8"');
        return res.status(401).send(unauthorizedWebpage);
    };

    const basicAuthUserName = getConfigValue('basicAuthUser.username');
    const basicAuthUserPassword = getConfigValue('basicAuthUser.password');
    const authHeader = request.headers.authorization;

    if (!authHeader) {
        return unauthorizedResponse(response);
    }

    const [scheme, credentials] = authHeader.split(' ');

    if (scheme !== 'Basic' || !credentials) {
        return unauthorizedResponse(response);
    }

    const usePerUserAuth = PER_USER_BASIC_AUTH && ENABLE_ACCOUNTS;
    const [username, password] = Buffer.from(credentials, 'base64')
        .toString('utf8')
        .split(':');

    if (!usePerUserAuth && username === basicAuthUserName && password === basicAuthUserPassword) {
        return callback();
    } else if (usePerUserAuth) {
        const userHandles = await getAllUserHandles();
        for (const userHandle of userHandles) {
            if (username === userHandle) {
                const user = await storage.getItem(toKey(userHandle));
                if (user && user.enabled && (user.password && user.password === getPasswordHash(password, user.salt))) {
                    return callback();
                }
            }
        }
    }
    return unauthorizedResponse(response);
};

export default basicAuthMiddleware;
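For reference, a sketch of the header a client must send to pass this middleware; Basic auth is just a base64-encoded username:password pair:

// Client-side construction of the Authorization header checked above.
const credentials = Buffer.from('admin:secret', 'utf8').toString('base64');
const headers = { 'Authorization': `Basic ${credentials}` };
// e.g. fetch('http://localhost:8000/', { headers })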
110
web-app/src/middleware/cacheBuster.js
Normal file
@@ -0,0 +1,110 @@
import crypto from 'node:crypto';
import { DEFAULT_USER } from '../constants.js';
import { getConfigValue } from '../util.js';

/**
 * Sets the Clear-Site-Data header to bust the browser cache.
 */
class CacheBuster {
    /**
     * Handles/User-Agents that have already been busted.
     * @type {Set<string>}
     */
    #keys = new Set();

    /**
     * User agent regex to match against requests.
     * @type {RegExp | null}
     */
    #userAgentRegex = null;

    /**
     * Whether the cache buster is enabled.
     * @type {boolean | null}
     */
    #isEnabled = null;

    constructor() {
        this.#isEnabled = !!getConfigValue('cacheBuster.enabled', false, 'boolean');
        const userAgentPattern = getConfigValue('cacheBuster.userAgentPattern', '');
        if (userAgentPattern) {
            try {
                this.#userAgentRegex = new RegExp(userAgentPattern, 'i');
            } catch {
                console.error('[Cache Buster] Invalid user agent pattern:', userAgentPattern);
            }
        }
    }

    /**
     * Check if the cache should be busted for the given request.
     * @param {import('express').Request} request Express request object.
     * @param {import('express').Response} response Express response object.
     * @returns {boolean} Whether the cache should be busted.
     */
    shouldBust(request, response) {
        // If disabled with config, don't do anything
        if (!this.#isEnabled) {
            return false;
        }

        // If response headers are already sent or response is ended
        if (response.headersSent || response.writableEnded) {
            console.warn('[Cache Buster] Response ended or headers already sent');
            return false;
        }

        // Check if the user agent matches the configured pattern
        const userAgent = request.headers['user-agent'] || '';

        // Bust cache for all requests if no pattern is set
        if (!this.#userAgentRegex) {
            return true;
        }

        return this.#userAgentRegex.test(userAgent);
    }

    /**
     * Middleware to bust the browser cache for the current user.
     * @type {import('express').RequestHandler}
     */
    #middleware(request, response, next) {
        const handle = request.user?.profile?.handle || DEFAULT_USER.handle;
        const userAgent = request.headers['user-agent'] || '';
        const hash = crypto.createHash('sha256').update(userAgent).digest('hex');
        const key = `${handle}-${hash}`;

        if (this.#keys.has(key)) {
            return next();
        }

        this.#keys.add(key);
        this.bust(request, response);
        next();
    }

    /**
     * Middleware to bust the browser cache for the current user.
     * @returns {import('express').RequestHandler} The middleware function.
     */
    get middleware() {
        return this.#middleware.bind(this);
    }

    /**
     * Bust the cache for the given response.
     * @param {import('express').Request} request Express request object.
     * @param {import('express').Response} response Express response object.
     * @returns {void}
     */
    bust(request, response) {
        if (this.shouldBust(request, response)) {
            response.setHeader('Clear-Site-Data', '"cache"');
        }
    }
}

// Export a single instance for the entire application
const instance = new CacheBuster();
export default instance;
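A wiring sketch for the singleton above; the import path and app instance are assumptions:

import cacheBuster from './middleware/cacheBuster.js';

app.use(cacheBuster.middleware);
// Each user/user-agent pair receives Clear-Site-Data: "cache" at most once per server lifetime.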
42
web-app/src/middleware/corsProxy.js
Normal file
@@ -0,0 +1,42 @@
import fetch from 'node-fetch';
import { forwardFetchResponse } from '../util.js';

/**
 * Middleware to proxy requests to a different domain
 * @param {import('express').Request} req Express request object
 * @param {import('express').Response} res Express response object
 */
export default async function corsProxyMiddleware(req, res) {
    const url = req.params.url; // get the url from the request path

    // Disallow circular requests
    const serverUrl = req.protocol + '://' + req.get('host');
    if (url.startsWith(serverUrl)) {
        return res.status(400).send('Circular requests are not allowed');
    }

    try {
        const headers = JSON.parse(JSON.stringify(req.headers));
        const headersToRemove = [
            'x-csrf-token', 'host', 'referer', 'origin', 'cookie',
            'x-forwarded-for', 'x-forwarded-protocol', 'x-forwarded-proto',
            'x-forwarded-host', 'x-real-ip', 'sec-fetch-mode',
            'sec-fetch-site', 'sec-fetch-dest',
        ];

        headersToRemove.forEach(header => delete headers[header]);

        const bodyMethods = ['POST', 'PUT', 'PATCH'];

        const response = await fetch(url, {
            method: req.method,
            headers: headers,
            body: bodyMethods.includes(req.method) ? JSON.stringify(req.body) : undefined,
        });

        // Copy over relevant response params to the proxy response
        forwardFetchResponse(response, res);
    } catch (error) {
        res.status(500).send('Error occurred while trying to proxy to: ' + url + ' ' + error);
    }
}
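The middleware reads the target from req.params.url, so the mount point must capture the entire remainder of the path; a sketch with a hypothetical route pattern:

import corsProxyMiddleware from './middleware/corsProxy.js';

// Everything after /proxy/ is captured into req.params.url (Express 4 wildcard param).
app.use('/proxy/:url(*)', corsProxyMiddleware);
// GET /proxy/https://example.com/data.json is forwarded to https://example.com/data.json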
48
web-app/src/middleware/hostWhitelist.js
Normal file
@@ -0,0 +1,48 @@
import path from 'node:path';
import { color, getConfigValue, safeReadFileSync } from '../util.js';
import { serverDirectory } from '../server-directory.js';
import { isHostAllowed, hostValidationMiddleware } from 'host-validation-middleware';

const knownHosts = new Set();
const maxKnownHosts = 1000;

const hostWhitelistEnabled = !!getConfigValue('hostWhitelist.enabled', false);
const hostWhitelist = Object.freeze(getConfigValue('hostWhitelist.hosts', []));
const hostWhitelistScan = !!getConfigValue('hostWhitelist.scan', false, 'boolean');

const hostNotAllowedHtml = safeReadFileSync(path.join(serverDirectory, 'public/error/host-not-allowed.html'))?.toString() ?? '';

const validationMiddleware = hostValidationMiddleware({
    allowedHosts: hostWhitelist,
    generateErrorMessage: () => hostNotAllowedHtml,
    errorResponseContentType: 'text/html',
});

/**
 * Middleware to validate remote hosts.
 * Useful to protect against DNS rebinding attacks.
 * @param {import('express').Request} req Request
 * @param {import('express').Response} res Response
 * @param {import('express').NextFunction} next Next middleware
 */
export default function hostWhitelistMiddleware(req, res, next) {
    const hostValue = req.headers.host;
    if (hostWhitelistScan && !isHostAllowed(hostValue, hostWhitelist) && !knownHosts.has(hostValue) && knownHosts.size < maxKnownHosts) {
        const isFirstWarning = knownHosts.size === 0;
        console.warn(color.red('Request from untrusted host:'), hostValue);
        console.warn(`If you trust this host, you can add it to ${color.yellow('hostWhitelist.hosts')} in config.yaml`);
        if (!hostWhitelistEnabled && isFirstWarning) {
            console.warn(`To protect against host spoofing, consider setting ${color.yellow('hostWhitelist.enabled')} to true`);
        }
        if (isFirstWarning) {
            console.warn(`To disable this warning, set ${color.yellow('hostWhitelist.scan')} to false`);
        }
        knownHosts.add(hostValue);
    }

    if (!hostWhitelistEnabled) {
        return next();
    }

    return validationMiddleware(req, res, next);
}
30
web-app/src/middleware/multerMonkeyPatch.js
Normal file
@@ -0,0 +1,30 @@
import { Buffer } from 'node:buffer';

/**
 * Decodes a file name from Latin1 to UTF-8.
 * @param {string} str Input string
 * @returns {string} Decoded file name
 */
function decodeFileName(str) {
    return Buffer.from(str, 'latin1').toString('utf-8');
}

/**
 * Middleware to decode file names from Latin1 to UTF-8.
 * See: https://github.com/expressjs/multer/issues/1104
 * @param {import('express').Request} req Request
 * @param {import('express').Response} _res Response
 * @param {import('express').NextFunction} next Next middleware
 */
export default function multerMonkeyPatch(req, _res, next) {
    try {
        if (req.file) {
            req.file.originalname = decodeFileName(req.file.originalname);
        }

        next();
    } catch (error) {
        console.error('Error in multerMonkeyPatch:', error);
        next();
    }
}
43
web-app/src/middleware/validateFileName.js
Normal file
@@ -0,0 +1,43 @@
import path from 'node:path';

/**
 * Checks if an object has a toString method.
 * @param {object} o Object to check
 * @returns {boolean} True if the object has a toString method, false otherwise
 */
function hasToString(o) {
    return o != null && typeof o.toString === 'function';
}

/**
 * Gets a middleware function that validates the field in the request body.
 * @param {string} fieldName Field name
 * @returns {import('express').RequestHandler} Middleware function
 */
export function getFileNameValidationFunction(fieldName) {
    /**
     * Validates the field in the request body.
     * @param {import('express').Request} req Request object
     * @param {import('express').Response} res Response object
     * @param {import('express').NextFunction} next Next middleware
     */
    return function validateAvatarUrlMiddleware(req, res, next) {
        if (req.body && fieldName in req.body && (typeof req.body[fieldName] === 'string' || hasToString(req.body[fieldName]))) {
            const forbiddenRegExp = path.sep === '/' ? /[/\x00]/ : /[/\x00\\]/;
            if (forbiddenRegExp.test(req.body[fieldName])) {
                console.error('An error occurred while validating the request body', {
                    handle: req.user.profile.handle,
                    path: req.originalUrl,
                    field: fieldName,
                    value: req.body[fieldName],
                });
                return res.sendStatus(400);
            }
        }

        next();
    };
}

const avatarUrlValidationFunction = getFileNameValidationFunction('avatar_url');
export default avatarUrlValidationFunction;
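A usage sketch for the factory above; the routes and handlers are hypothetical:

import avatarUrlValidationFunction, { getFileNameValidationFunction } from './middleware/validateFileName.js';

// The default export guards the conventional 'avatar_url' field.
router.post('/characters/rename', avatarUrlValidationFunction, renameHandler);
// The factory can guard any other body field by name.
router.post('/files/save', getFileNameValidationFunction('file_name'), saveHandler);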
53
web-app/src/middleware/webpack-serve.js
Normal file
@@ -0,0 +1,53 @@
import path from 'node:path';
import webpack from 'webpack';
import getPublicLibConfig from '../../webpack.config.js';

export default function getWebpackServeMiddleware() {
    /**
     * A very spartan recreation of webpack-dev-middleware.
     * @param {import('express').Request} req Request object.
     * @param {import('express').Response} res Response object.
     * @param {import('express').NextFunction} next Next function.
     * @type {import('express').RequestHandler}
     */
    function devMiddleware(req, res, next) {
        const publicLibConfig = getPublicLibConfig();
        const outputPath = publicLibConfig.output?.path;
        const outputFile = publicLibConfig.output?.filename;
        const parsedPath = path.parse(req.path);

        if (req.method === 'GET' && parsedPath.dir === '/' && parsedPath.base === outputFile) {
            return res.sendFile(outputFile, { root: outputPath });
        }

        next();
    }

    /**
     * Wait until Webpack is done compiling.
     * @param {object} param Parameters.
     * @param {boolean} [param.forceDist] Whether to force the use of the /dist folder.
     * @returns {Promise<void>}
     */
    devMiddleware.runWebpackCompiler = ({ forceDist = false } = {}) => {
        const publicLibConfig = getPublicLibConfig(forceDist);
        const compiler = webpack(publicLibConfig);

        return new Promise((resolve) => {
            console.log();
            console.log('Compiling frontend libraries...');
            compiler.run((_error, stats) => {
                const output = stats?.toString(publicLibConfig.stats);
                if (output) {
                    console.log(output);
                    console.log();
                }
                compiler.close(() => {
                    resolve();
                });
            });
        });
    };

    return devMiddleware;
}
148
web-app/src/middleware/whitelist.js
Normal file
@@ -0,0 +1,148 @@
import path from 'node:path';
import fs from 'node:fs';
import process from 'node:process';
import dns from 'node:dns';
import Handlebars from 'handlebars';
import ipMatching from 'ip-matching';
import isDocker from 'is-docker';

import { getIpFromRequest } from '../express-common.js';
import { color, getConfigValue, safeReadFileSync } from '../util.js';

const whitelistPath = path.join(process.cwd(), './whitelist.txt');
const enableForwardedWhitelist = !!getConfigValue('enableForwardedWhitelist', false, 'boolean');
const whitelistDockerHosts = !!getConfigValue('whitelistDockerHosts', true, 'boolean');
/** @type {string[]} */
let whitelist = getConfigValue('whitelist', []);

if (fs.existsSync(whitelistPath)) {
    try {
        let whitelistTxt = fs.readFileSync(whitelistPath, 'utf-8');
        whitelist = whitelistTxt.split('\n').filter(ip => ip).map(ip => ip.trim());
    } catch (e) {
        // Ignore errors that may occur when reading the whitelist (e.g. permissions)
    }
}

/**
 * Validates and filters the whitelist, removing any invalid entries.
 * @param {string[]} entries - The whitelist entries to validate
 * @returns {string[]} The filtered list of valid whitelist entries
 */
function validateWhitelist(entries) {
    const validEntries = [];

    for (const entry of entries) {
        try {
            // This will throw if the entry is not a valid IP or CIDR
            ipMatching.getMatch(entry);
            validEntries.push(entry);
        } catch (e) {
            console.warn(`Whitelist ${color.red('Warning')}: Ignoring invalid entry ${color.yellow(entry)} - ${e.message}`);
        }
    }

    return validEntries;
}

whitelist = validateWhitelist(whitelist);

/**
 * Get the client IP address from the request headers.
 * @param {import('express').Request} req Express request object
 * @returns {string|undefined} The client IP address
 */
function getForwardedIp(req) {
    if (!enableForwardedWhitelist) {
        return undefined;
    }

    // Check if X-Real-IP is available
    if (req.headers['x-real-ip']) {
        return req.headers['x-real-ip'].toString();
    }

    // Check for X-Forwarded-For and parse if available
    if (req.headers['x-forwarded-for']) {
        const ipList = req.headers['x-forwarded-for'].toString().split(',').map(ip => ip.trim());
        return ipList[0];
    }

    // If none of the headers are available, return undefined
    return undefined;
}

/**
 * Resolves the IP addresses of Docker hostnames and adds them to the whitelist.
 * @returns {Promise<void>} Promise that resolves when the Docker hostnames are resolved
 */
async function addDockerHostsToWhitelist() {
    if (!whitelistDockerHosts || !isDocker()) {
        return;
    }

    const whitelistHosts = ['host.docker.internal', 'gateway.docker.internal'];

    for (const entry of whitelistHosts) {
        try {
            const result = await dns.promises.lookup(entry);
            console.info(`Resolved whitelist hostname ${color.green(entry)} to IPv${result.family} address ${color.green(result.address)}`);
            whitelist.push(result.address);
        } catch (e) {
            console.warn(`Failed to resolve whitelist hostname ${color.red(entry)}: ${e.message}`);
        }
    }
}

/**
 * Returns a middleware function that checks if the client IP is in the whitelist.
 * @returns {Promise<import('express').RequestHandler>} Promise that resolves to the middleware function
 */
export default async function getWhitelistMiddleware() {
    const forbiddenWebpage = Handlebars.compile(
        safeReadFileSync('./public/error/forbidden-by-whitelist.html') ?? '',
    );

    const noLogPaths = [
        '/favicon.ico',
    ];

    await addDockerHostsToWhitelist();

    return function (req, res, next) {
        const clientIp = getIpFromRequest(req);
        const forwardedIp = getForwardedIp(req);
        const userAgent = req.headers['user-agent'];

        /**
         * Checks if an IP address matches any entry in the whitelist.
         * @param {string[]} whitelist - The list of whitelisted IPs/CIDRs
         * @param {string} ip - The IP address to check
         * @returns {boolean} True if the IP matches any whitelist entry
         */
        function isIPInWhitelist(whitelist, ip) {
            return whitelist.some(x => ipMatching.matches(ip, ipMatching.getMatch(x)));
        }

        if (!isIPInWhitelist(whitelist, clientIp)
            || forwardedIp && !isIPInWhitelist(whitelist, forwardedIp)
        ) {
            // Log the connection attempt with the real IP address
            const ipDetails = forwardedIp
                ? `${clientIp} (forwarded from ${forwardedIp})`
                : clientIp;

            if (!noLogPaths.includes(req.path)) {
                console.warn(
                    color.red(
                        `Blocked connection from ${ipDetails}; User Agent: ${userAgent}\n\tTo allow this connection, add its IP address to the whitelist or disable whitelist mode by editing config.yaml in the root directory of your SillyTavern installation.\n`,
                    ),
                );
            }

            return res.status(403).send(forbiddenWebpage({ ipDetails }));
        }
        next();
    };
}
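A wiring sketch for the factory above; it is async because Docker hostnames may need to be resolved before the first request is served (the app instance is an assumption):

import getWhitelistMiddleware from './middleware/whitelist.js';

const whitelistMiddleware = await getWhitelistMiddleware();
app.use(whitelistMiddleware); // non-whitelisted IPs receive a 403 with the rendered error page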
293
web-app/src/plugin-loader.js
Normal file
@@ -0,0 +1,293 @@
import fs from 'node:fs';
import path from 'node:path';
import url from 'node:url';

import express from 'express';
import { default as git, CheckRepoActions } from 'simple-git';
import { sync as commandExistsSync } from 'command-exists';
import { getConfigValue, color } from './util.js';

const enableServerPlugins = !!getConfigValue('enableServerPlugins', false, 'boolean');
const enableServerPluginsAutoUpdate = !!getConfigValue('enableServerPluginsAutoUpdate', true, 'boolean');

/**
 * Map of loaded plugins.
 * @type {Map<string, any>}
 */
const loadedPlugins = new Map();

/**
 * Determine if a file is a CommonJS module.
 * @param {string} file Path to file
 * @returns {boolean} True if file is a CommonJS module
 */
const isCommonJS = (file) => path.extname(file) === '.js' || path.extname(file) === '.cjs';

/**
 * Determine if a file is an ECMAScript module.
 * @param {string} file Path to file
 * @returns {boolean} True if file is an ECMAScript module
 */
const isESModule = (file) => path.extname(file) === '.mjs';

/**
 * Load and initialize server plugins from a directory if they are enabled.
 * @param {import('express').Express} app Express app
 * @param {string} pluginsPath Path to plugins directory
 * @returns {Promise<Function>} Promise that resolves when all plugins are loaded. Resolves to a "cleanup" function to
 * be called before the server shuts down.
 */
export async function loadPlugins(app, pluginsPath) {
    try {
        const exitHooks = [];
        const emptyFn = () => { };

        // Server plugins are disabled.
        if (!enableServerPlugins) {
            return emptyFn;
        }

        // Plugins directory does not exist.
        if (!fs.existsSync(pluginsPath)) {
            return emptyFn;
        }

        const files = fs.readdirSync(pluginsPath);

        // No plugins to load.
        if (files.length === 0) {
            return emptyFn;
        }

        await updatePlugins(pluginsPath);

        for (const file of files) {
            const pluginFilePath = path.join(pluginsPath, file);

            if (fs.statSync(pluginFilePath).isDirectory()) {
                await loadFromDirectory(app, pluginFilePath, exitHooks);
                continue;
            }

            // Not a JavaScript file.
            if (!isCommonJS(file) && !isESModule(file)) {
                continue;
            }

            await loadFromFile(app, pluginFilePath, exitHooks);
        }

        if (loadedPlugins.size > 0) {
            console.log(`${loadedPlugins.size} server plugin(s) are currently loaded. Make sure you know exactly what they do, and only install plugins from trusted sources!`);
        }

        // Call all plugin "exit" functions at once and wait for them to finish
        return () => Promise.all(exitHooks.map(exitFn => exitFn()));
    } catch (error) {
        console.error('Plugin loading failed.', error);
        return () => { };
    }
}

/**
 * Loads and initializes plugins from a plugin directory.
 * @param {import('express').Express} app Express app
 * @param {string} pluginDirectoryPath Path to plugin directory
 * @param {Array<Function>} exitHooks Array of functions to be run on plugin exit
 */
async function loadFromDirectory(app, pluginDirectoryPath, exitHooks) {
    const files = fs.readdirSync(pluginDirectoryPath);

    // No plugins to load.
    if (files.length === 0) {
        return;
    }

    // Plugin is an npm package.
    const packageJsonFilePath = path.join(pluginDirectoryPath, 'package.json');
    if (fs.existsSync(packageJsonFilePath)) {
        if (await loadFromPackage(app, packageJsonFilePath, exitHooks)) {
            return;
        }
    }

    // Plugin is a module file.
    const fileTypes = ['index.js', 'index.cjs', 'index.mjs'];

    for (const fileType of fileTypes) {
        const filePath = path.join(pluginDirectoryPath, fileType);
        if (fs.existsSync(filePath)) {
            if (await loadFromFile(app, filePath, exitHooks)) {
                return;
            }
        }
    }
}

/**
 * Loads and initializes a plugin from an npm package.
 * @param {import('express').Express} app Express app
 * @param {string} packageJsonPath Path to package.json file
 * @param {Array<Function>} exitHooks Array of functions to be run on plugin exit. Will be pushed to if the plugin has
 * an "exit" function.
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was loaded successfully
 */
async function loadFromPackage(app, packageJsonPath, exitHooks) {
    try {
        const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
        if (packageJson.main) {
            const pluginFilePath = path.join(path.dirname(packageJsonPath), packageJson.main);
            return await loadFromFile(app, pluginFilePath, exitHooks);
        }
    } catch (error) {
        console.error(`Failed to load plugin from ${packageJsonPath}: ${error}`);
    }
    return false;
}

/**
 * Loads and initializes a plugin from a file.
 * @param {import('express').Express} app Express app
 * @param {string} pluginFilePath Path to plugin file
 * @param {Array.<Function>} exitHooks Array of functions to be run on plugin exit. Will be pushed to if the plugin has
 * an "exit" function.
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was loaded successfully
 */
async function loadFromFile(app, pluginFilePath, exitHooks) {
    try {
        const fileUrl = url.pathToFileURL(pluginFilePath).toString();
        const plugin = await import(fileUrl);
        console.log(`Initializing plugin from ${pluginFilePath}`);
        return await initPlugin(app, plugin, exitHooks);
    } catch (error) {
        console.error(`Failed to load plugin from ${pluginFilePath}: ${error}`);
        return false;
    }
}

/**
 * Check whether a plugin ID is valid (only lowercase alphanumeric characters, hyphens, and underscores).
 * @param {string} id The plugin ID to check
 * @returns {boolean} True if the plugin ID is valid.
 */
function isValidPluginID(id) {
    return /^[a-z0-9_-]+$/.test(id);
}

/**
 * Initializes a plugin module.
 * @param {import('express').Express} app Express app
 * @param {any} plugin Plugin module
 * @param {Array.<Function>} exitHooks Array of functions to be run on plugin exit. Will be pushed to if the plugin has
 * an "exit" function.
 * @returns {Promise<boolean>} Promise that resolves to true if plugin was initialized successfully
 */
async function initPlugin(app, plugin, exitHooks) {
    const info = plugin.info || plugin.default?.info;
    if (typeof info !== 'object') {
        console.error('Failed to load plugin module; plugin info not found');
        return false;
    }

    // We don't currently use "name" or "description" but it would be nice to have a UI for listing server plugins, so
    // require them now just to be safe
    for (const field of ['id', 'name', 'description']) {
        if (typeof info[field] !== 'string') {
            console.error(`Failed to load plugin module; plugin info missing field '${field}'`);
            return false;
        }
    }

    const init = plugin.init || plugin.default?.init;
    if (typeof init !== 'function') {
        console.error('Failed to load plugin module; no init function');
        return false;
    }

    const { id } = info;

    if (!isValidPluginID(id)) {
        console.error(`Failed to load plugin module; invalid plugin ID '${id}'`);
        return false;
    }

    if (loadedPlugins.has(id)) {
        console.error(`Failed to load plugin module; plugin ID '${id}' is already in use`);
        return false;
    }

    // Allow the plugin to register API routes under /api/plugins/[plugin ID] via a router
    const router = express.Router();

    await init(router);

    loadedPlugins.set(id, plugin);

    // Add API routes to the app if the plugin registered any
    if (router.stack.length > 0) {
        app.use(`/api/plugins/${id}`, router);
    }

    const exit = plugin.exit || plugin.default?.exit;
    if (typeof exit === 'function') {
        exitHooks.push(exit);
    }

    return true;
}

/**
 * Automatically update all git plugins in the ./plugins directory
 * @param {string} pluginsPath Path to plugins directory
 */
async function updatePlugins(pluginsPath) {
    if (!enableServerPluginsAutoUpdate) {
        return;
    }

    const directories = fs.readdirSync(pluginsPath)
        .filter(file => !file.startsWith('.'))
        .filter(file => fs.statSync(path.join(pluginsPath, file)).isDirectory());

    if (directories.length === 0) {
        return;
    }

    console.log(color.blue('Auto-updating server plugins... Set'), color.yellow('enableServerPluginsAutoUpdate: false'), color.blue('in config.yaml to disable this feature.'));

    if (!commandExistsSync('git')) {
        console.error(color.red('Git is not installed. Please install Git to enable auto-updating of server plugins.'));
        return;
    }

    let pluginsToUpdate = 0;

    for (const directory of directories) {
        try {
            const pluginPath = path.join(pluginsPath, directory);
            const pluginRepo = git(pluginPath);

            const isRepo = await pluginRepo.checkIsRepo(CheckRepoActions.IS_REPO_ROOT);
            if (!isRepo) {
                continue;
            }

            await pluginRepo.fetch();
            const commitHash = await pluginRepo.revparse(['HEAD']);
            const trackingBranch = await pluginRepo.revparse(['--abbrev-ref', '@{u}']);
            const log = await pluginRepo.log({
                from: commitHash,
                to: trackingBranch,
            });

            if (log.total === 0) {
                continue;
            }

            pluginsToUpdate++;
            await pluginRepo.pull();
            const latestCommit = await pluginRepo.revparse(['HEAD']);
            console.log(`Plugin ${color.green(directory)} updated to commit ${color.cyan(latestCommit)}`);
        } catch (error) {
            console.error(color.red(`Failed to update plugin ${directory}: ${error.message}`));
        }
    }

    if (pluginsToUpdate === 0) {
        console.log('All plugins are up to date.');
    }
}
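Putting the contract enforced by initPlugin together, a minimal plugin module sketch (the directory and route names are hypothetical):

// plugins/hello/index.mjs
export const info = {
    id: 'hello',                       // must match /^[a-z0-9_-]+$/ and be unique
    name: 'Hello Plugin',
    description: 'Replies with a greeting.',
};

export async function init(router) {
    // Becomes POST /api/plugins/hello/greet once the plugin is loaded.
    router.post('/greet', (_req, res) => res.json({ message: 'Hello from a server plugin!' }));
}

export async function exit() {
    // Optional cleanup hook, awaited on server shutdown.
}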
68
web-app/src/png/encode.js
Normal file
@@ -0,0 +1,68 @@
import { crc32 } from 'crc';

/**
 * Encodes PNG chunks into a PNG file format buffer.
 * @param {Array<{ name: string; data: Uint8Array }>} chunks Array of PNG chunks
 * @returns {Uint8Array} Encoded PNG data
 * @copyright Based on https://github.com/hughsk/png-chunks-encode (MIT)
 */
export default function encode(chunks) {
    const uint8 = new Uint8Array(4);
    const int32 = new Int32Array(uint8.buffer);
    const uint32 = new Uint32Array(uint8.buffer);

    // 8 bytes for the PNG signature, plus 12 bytes of overhead per chunk
    // (4 length + 4 name + 4 CRC) in addition to the chunk data itself.
    let totalSize = 8;
    let idx = totalSize;

    for (let i = 0; i < chunks.length; i++) {
        totalSize += chunks[i].data.length;
        totalSize += 12;
    }

    const output = new Uint8Array(totalSize);

    // PNG signature: \x89PNG\r\n\x1a\n
    output[0] = 0x89;
    output[1] = 0x50;
    output[2] = 0x4E;
    output[3] = 0x47;
    output[4] = 0x0D;
    output[5] = 0x0A;
    output[6] = 0x1A;
    output[7] = 0x0A;

    for (let i = 0; i < chunks.length; i++) {
        const { name, data } = chunks[i];
        const size = data.length;
        const nameChars = [
            name.charCodeAt(0),
            name.charCodeAt(1),
            name.charCodeAt(2),
            name.charCodeAt(3),
        ];

        // Chunk data length (written big-endian)
        uint32[0] = size;
        output[idx++] = uint8[3];
        output[idx++] = uint8[2];
        output[idx++] = uint8[1];
        output[idx++] = uint8[0];

        // Chunk type name (4 ASCII characters)
        output[idx++] = nameChars[0];
        output[idx++] = nameChars[1];
        output[idx++] = nameChars[2];
        output[idx++] = nameChars[3];

        // Chunk data
        for (let j = 0; j < size;) {
            output[idx++] = data[j++];
        }

        // CRC-32 over the chunk name and data (written big-endian)
        const crc = crc32(data, crc32(new Uint8Array(nameChars)));

        int32[0] = crc;
        output[idx++] = uint8[3];
        output[idx++] = uint8[2];
        output[idx++] = uint8[1];
        output[idx++] = uint8[0];
    }

    return output;
}
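A usage sketch for the encoder above, inserting a tEXt metadata chunk before IEND; decode is assumed to be a matching chunk extractor (e.g. a png-chunks-extract counterpart to this encoder):

import fs from 'node:fs';
import encode from './png/encode.js';

const chunks = decode(fs.readFileSync('card.png')); // hypothetical matching decoder
const text = Buffer.from('payload', 'utf8').toString('base64');
// tEXt layout: keyword, NUL separator, latin1 text.
const data = new Uint8Array(Buffer.from(`chara\0${text}`, 'latin1'));
chunks.splice(-1, 0, { name: 'tEXt', data }); // keep IEND as the final chunk
fs.writeFileSync('card.png', Buffer.from(encode(chunks)));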
1415
web-app/src/prompt-converters.js
Normal file
File diff suppressed because it is too large
65
web-app/src/recover-password.js
Normal file
@@ -0,0 +1,65 @@
import fs from 'node:fs';
import process from 'node:process';
import yaml from 'yaml';
import storage from 'node-persist';
import {
    initUserStorage,
    getPasswordSalt,
    getPasswordHash,
    toKey,
} from './users.js';

/**
 * Initializes the storage with the data root specified in the config file.
 * @param {string} configPath - The path to the config file.
 */
async function initStorage(configPath) {
    const config = yaml.parse(fs.readFileSync(configPath, 'utf8'));
    const dataRoot = config.dataRoot;

    if (!dataRoot) {
        console.error('No "dataRoot" setting found in config.yaml file.');
        process.exit(1);
    }

    await initUserStorage(dataRoot);
}

/**
 * Recovers a user account by enabling it and optionally setting a new password.
 * @param {string} configPath - The path to the config file.
 * @param {string} userAccount - The username of the account to recover.
 * @param {string} [userPassword] - The new password for the account. If not provided, sets an empty password.
 */
export async function recoverPassword(configPath, userAccount, userPassword) {
    await initStorage(configPath);

    /**
     * @type {import('./users').User}
     */
    const user = await storage.get(toKey(userAccount));

    if (!user) {
        console.error(`User "${userAccount}" not found.`);
        process.exit(1);
    }

    if (!user.enabled) {
        console.log('User is disabled. Enabling...');
        user.enabled = true;
    }

    if (userPassword) {
        console.log('Setting new password...');
        const salt = getPasswordSalt();
        const passwordHash = getPasswordHash(userPassword, salt);
        user.password = passwordHash;
        user.salt = salt;
    } else {
        console.log('Setting an empty password...');
        user.password = '';
        user.salt = '';
    }

    await storage.setItem(toKey(userAccount), user);
    console.log('User recovered. The program will exit now.');
}
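A minimal invocation sketch; the config path, account name, and password shown here are placeholders.

import { recoverPassword } from './recover-password.js';

// Re-enables the 'admin' account and sets a new password; exits the
// process early if the config has no dataRoot or the user is missing.
await recoverPassword('./config.yaml', 'admin', 'hunter2');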
52
web-app/src/request-proxy.js
Normal file
@@ -0,0 +1,52 @@
import process from 'node:process';
import http from 'node:http';
import https from 'node:https';
import { ProxyAgent } from 'proxy-agent';
import { isValidUrl, color } from './util.js';

const LOG_HEADER = '[Request Proxy]';

/**
 * Initialize request proxy.
 * @param {ProxySettings} settings Proxy settings.
 * @typedef {object} ProxySettings
 * @property {boolean} enabled Whether proxy is enabled.
 * @property {string} url Proxy URL.
 * @property {string[]} bypass List of URLs to bypass proxy.
 */
export default function initRequestProxy({ enabled, url, bypass }) {
    try {
        // No proxy is enabled, so return
        if (!enabled) {
            return;
        }

        if (!url) {
            console.error(color.red(LOG_HEADER), 'No proxy URL provided');
            return;
        }

        if (!isValidUrl(url)) {
            console.error(color.red(LOG_HEADER), 'Invalid proxy URL provided');
            return;
        }

        // ProxyAgent uses proxy-from-env under the hood
        // Reference: https://github.com/Rob--W/proxy-from-env
        process.env.all_proxy = url;

        if (Array.isArray(bypass) && bypass.length > 0) {
            process.env.no_proxy = bypass.join(',');
        }

        const proxyAgent = new ProxyAgent();
        http.globalAgent = proxyAgent;
        https.globalAgent = proxyAgent;

        console.info();
        console.info(color.green(LOG_HEADER), 'Proxy URL is used:', color.blue(url));
        console.info();
    } catch (error) {
        console.error(color.red(LOG_HEADER), 'Failed to initialize request proxy:', error);
    }
}
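A usage sketch with hypothetical settings, mirroring the ProxySettings typedef above:

import initRequestProxy from './request-proxy.js';

// Route outbound requests through a local proxy, bypassing it for localhost.
initRequestProxy({
    enabled: true,
    url: 'http://127.0.0.1:8888',
    bypass: ['localhost', '127.0.0.1'],
});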
3
web-app/src/server-directory.js
Normal file
@@ -0,0 +1,3 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
export const serverDirectory = path.dirname(import.meta.dirname ?? path.dirname(fileURLToPath(import.meta.url)));
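A short sketch of the intended use; the same pattern appears in server-main.js below:

import path from 'node:path';
import { serverDirectory } from './server-directory.js';

// Resolve bundled assets relative to the package root, independent of
// the process working directory.
const publicDir = path.join(serverDirectory, 'public');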
21
web-app/src/server-events.js
Normal file
@@ -0,0 +1,21 @@
import EventEmitter from 'node:events';
import process from 'node:process';

/**
 * @typedef {import('../index').ServerEventMap} ServerEventMap
 * @type {EventEmitter<ServerEventMap>} The default event source.
 */
export const serverEvents = new EventEmitter();
process.serverEvents = serverEvents;
export default serverEvents;

/**
 * @enum {string}
 * @readonly
 */
export const EVENT_NAMES = Object.freeze({
    /**
     * Emitted when the server has started.
     */
    SERVER_STARTED: 'server-started',
});
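A subscriber sketch; the payload shape matches the emit in server-main.js below:

import serverEvents, { EVENT_NAMES } from './server-events.js';

serverEvents.on(EVENT_NAMES.SERVER_STARTED, ({ url }) => {
    console.log('Server started at', url.toString());
});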
5
web-app/src/server-global.js
Executable file
@@ -0,0 +1,5 @@
#!/usr/bin/env node
globalThis.FORCE_GLOBAL_MODE = true;
await import('../server.js');

export {};
422
web-app/src/server-main.js
Normal file
@@ -0,0 +1,422 @@
// native node modules
import path from 'node:path';
import util from 'node:util';
import net from 'node:net';
import dns from 'node:dns';
import process from 'node:process';

import cors from 'cors';
import { csrfSync } from 'csrf-sync';
import express from 'express';
import compression from 'compression';
import cookieSession from 'cookie-session';
import multer from 'multer';
import responseTime from 'response-time';
import helmet from 'helmet';
import bodyParser from 'body-parser';

// local library imports
import './fetch-patch.js';
import { serverDirectory } from './server-directory.js';

import { serverEvents, EVENT_NAMES } from './server-events.js';
import { loadPlugins } from './plugin-loader.js';
import {
    initUserStorage,
    getCookieSecret,
    getCookieSessionName,
    ensurePublicDirectoriesExist,
    getUserDirectoriesList,
    migrateSystemPrompts,
    migrateUserData,
    requireLoginMiddleware,
    setUserDataMiddleware,
    shouldRedirectToLogin,
    cleanUploads,
    getSessionCookieAge,
    verifySecuritySettings,
    loginPageMiddleware,
} from './users.js';

import getWebpackServeMiddleware from './middleware/webpack-serve.js';
import basicAuthMiddleware from './middleware/basicAuth.js';
import getWhitelistMiddleware from './middleware/whitelist.js';
import accessLoggerMiddleware, { getAccessLogPath, migrateAccessLog } from './middleware/accessLogWriter.js';
import multerMonkeyPatch from './middleware/multerMonkeyPatch.js';
import initRequestProxy from './request-proxy.js';
import cacheBuster from './middleware/cacheBuster.js';
import corsProxyMiddleware from './middleware/corsProxy.js';
import hostWhitelistMiddleware from './middleware/hostWhitelist.js';
import {
    getVersion,
    color,
    removeColorFormatting,
    getSeparator,
    safeReadFileSync,
    setupLogLevel,
    setWindowTitle,
    getConfigValue,
} from './util.js';
import { UPLOADS_DIRECTORY } from './constants.js';
import { ensureThumbnailCache } from './endpoints/thumbnails.js';

// Routers
import { router as usersPublicRouter } from './endpoints/users-public.js';
import { init as statsInit, onExit as statsOnExit } from './endpoints/stats.js';
import { checkForNewContent } from './endpoints/content-manager.js';
import { init as settingsInit } from './endpoints/settings.js';
import { redirectDeprecatedEndpoints, ServerStartup, setupPrivateEndpoints } from './server-startup.js';
import { diskCache } from './endpoints/characters.js';
import { migrateFlatSecrets } from './endpoints/secrets.js';
import { migrateGroupChatsMetadataFormat } from './endpoints/groups.js';

// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
// Safe to remove once support for Node v20 is dropped.
if (process.versions && process.versions.node && process.versions.node.match(/20\.[0-2]\.0/)) {
    // @ts-ignore
    if (net.setDefaultAutoSelectFamily) net.setDefaultAutoSelectFamily(false);
}

// Unrestrict console logs display limit
util.inspect.defaultOptions.maxArrayLength = null;
util.inspect.defaultOptions.maxStringLength = null;
util.inspect.defaultOptions.depth = 4;
/** @type {import('./command-line.js').CommandLineArguments} */
const cliArgs = globalThis.COMMAND_LINE_ARGS;

if (!cliArgs.enableIPv6 && !cliArgs.enableIPv4) {
    console.error('error: You can\'t disable all internet protocols: at least IPv6 or IPv4 must be enabled.');
    process.exit(1);
}

const app = express();
app.use(helmet({
    contentSecurityPolicy: false,
}));
app.use(compression());
app.use(responseTime());

app.use(bodyParser.json({ limit: '500mb' }));
app.use(bodyParser.urlencoded({ extended: true, limit: '500mb' }));

// CORS Settings //
const CORS = cors({
    origin: 'null',
    methods: ['OPTIONS'],
});

app.use(CORS);

if (cliArgs.listen && cliArgs.basicAuthMode) {
    app.use(basicAuthMiddleware);
}

if (cliArgs.whitelistMode) {
    const whitelistMiddleware = await getWhitelistMiddleware();
    app.use(whitelistMiddleware);
}

app.use(hostWhitelistMiddleware);

if (cliArgs.listen) {
    app.use(accessLoggerMiddleware());
}

if (cliArgs.enableCorsProxy) {
    app.use('/proxy/:url(*)', corsProxyMiddleware);
} else {
    app.use('/proxy/:url(*)', async (_, res) => {
        const message = 'CORS proxy is disabled. Enable it in config.yaml or use the --corsProxy flag.';
        console.log(message);
        res.status(404).send(message);
    });
}

app.use(cookieSession({
    name: getCookieSessionName(),
    sameSite: 'lax',
    httpOnly: true,
    maxAge: getSessionCookieAge(),
    secret: getCookieSecret(globalThis.DATA_ROOT),
}));

app.use(setUserDataMiddleware);

// CSRF Protection //
if (!cliArgs.disableCsrf) {
    const csrfSyncProtection = csrfSync({
        getTokenFromState: (req) => {
            if (!req.session) {
                console.error('(CSRF error) getTokenFromState: Session object not initialized');
                return;
            }
            return req.session.csrfToken;
        },
        getTokenFromRequest: (req) => {
            return req.headers['x-csrf-token']?.toString();
        },
        storeTokenInState: (req, token) => {
            if (!req.session) {
                console.error('(CSRF error) storeTokenInState: Session object not initialized');
                return;
            }
            req.session.csrfToken = token;
        },
        size: 32,
    });

    app.get('/csrf-token', (req, res) => {
        res.json({
            'token': csrfSyncProtection.generateToken(req),
        });
    });

    // Customize the error message
    csrfSyncProtection.invalidCsrfTokenError.message = color.red('Invalid CSRF token. Please refresh the page and try again.');
    csrfSyncProtection.invalidCsrfTokenError.stack = undefined;

    app.use(csrfSyncProtection.csrfSynchronisedProtection);
} else {
    console.warn('\nCSRF protection is disabled. This will make your server vulnerable to CSRF attacks.\n');
    app.get('/csrf-token', (req, res) => {
        res.json({
            'token': 'disabled',
        });
    });
}
// Static files
// Host index page
app.get('/', cacheBuster.middleware, (request, response) => {
    if (shouldRedirectToLogin(request)) {
        const query = request.url.split('?')[1];
        const redirectUrl = query ? `/login?${query}` : '/login';
        return response.redirect(redirectUrl);
    }

    return response.sendFile('index.html', { root: path.join(serverDirectory, 'public') });
});

// Callback endpoint for OAuth PKCE flows (e.g. OpenRouter)
app.get('/callback/:source?', (request, response) => {
    const source = request.params.source;
    const query = request.url.split('?')[1];
    const searchParams = new URLSearchParams();
    source && searchParams.set('source', source);
    query && searchParams.set('query', query);
    const path = `/?${searchParams.toString()}`;
    return response.redirect(307, path);
});

// Host login page
app.get('/login', loginPageMiddleware);

// Host frontend assets
const webpackMiddleware = getWebpackServeMiddleware();
app.use(webpackMiddleware);
app.use(express.static(path.join(serverDirectory, 'public'), {}));

// Public API
app.use('/api/users', usersPublicRouter);

// Everything below this line requires authentication
app.use(requireLoginMiddleware);
app.post('/api/ping', (request, response) => {
    if (request.query.extend && request.session) {
        request.session.touch = Date.now();
    }

    response.sendStatus(204);
});

// File uploads
const uploadsPath = path.join(cliArgs.dataRoot, UPLOADS_DIRECTORY);
app.use(multer({ dest: uploadsPath, limits: { fieldSize: 500 * 1024 * 1024 } }).single('avatar'));
app.use(multerMonkeyPatch);

app.get('/version', async function (_, response) {
    const data = await getVersion();
    response.send(data);
});

redirectDeprecatedEndpoints(app);
setupPrivateEndpoints(app);
/**
 * Tasks that need to be run before the server starts listening.
 * @returns {Promise<void>}
 */
async function preSetupTasks() {
    const version = await getVersion();

    // Print formatted header
    console.log();
    console.log(`SillyTavern ${version.pkgVersion}`);
    if (version.gitBranch && version.commitDate) {
        const date = new Date(version.commitDate);
        const localDate = date.toLocaleString('en-US', { timeZoneName: 'short' });
        console.log(`Running '${version.gitBranch}' (${version.gitRevision}) - ${localDate}`);
        if (!version.isLatest && ['staging', 'release'].includes(version.gitBranch)) {
            console.log('INFO: Currently not on the latest commit.');
            console.log(' Run \'git pull\' to update. If you have any merge conflicts, run \'git reset --hard\' and \'git pull\' to reset your branch.');
        }
    }
    console.log();

    const directories = await getUserDirectoriesList();
    await migrateGroupChatsMetadataFormat(directories);
    await checkForNewContent(directories);
    await ensureThumbnailCache(directories);
    await diskCache.verify(directories);
    migrateFlatSecrets(directories);
    cleanUploads();
    migrateAccessLog();

    await settingsInit();
    await statsInit();

    const pluginsDirectory = path.join(serverDirectory, 'plugins');
    const cleanupPlugins = await loadPlugins(app, pluginsDirectory);
    const consoleTitle = process.title;

    let isExiting = false;
    const exitProcess = async () => {
        if (isExiting) return;
        isExiting = true;
        await statsOnExit();
        if (typeof cleanupPlugins === 'function') {
            await cleanupPlugins();
        }
        diskCache.dispose();
        setWindowTitle(consoleTitle);
        process.exit();
    };

    // Set up event listeners for a graceful shutdown
    process.on('SIGINT', exitProcess);
    process.on('SIGTERM', exitProcess);
    process.on('uncaughtException', (err) => {
        console.error('Uncaught exception:', err);
        exitProcess();
    });

    // Add request proxy.
    initRequestProxy({ enabled: cliArgs.requestProxyEnabled, url: cliArgs.requestProxyUrl, bypass: cliArgs.requestProxyBypass });

    // Wait for frontend libs to compile
    await webpackMiddleware.runWebpackCompiler();
}
/**
 * Tasks that need to be run after the server starts listening.
 * @param {import('./server-startup.js').ServerStartupResult} result The result of the server startup
 * @returns {Promise<void>}
 */
async function postSetupTasks(result) {
    const browserLaunchHostname = await cliArgs.getBrowserLaunchHostname(result);
    const browserLaunchUrl = cliArgs.getBrowserLaunchUrl(browserLaunchHostname);
    const browserLaunchApp = String(getConfigValue('browserLaunch.browser', 'default') ?? '');

    if (cliArgs.browserLaunchEnabled) {
        try {
            // TODO: This should be converted to a regular import when support for Node 18 is dropped
            const openModule = await import('open');
            const { default: open, apps } = openModule;

            function getBrowsers() {
                const isAndroid = process.platform === 'android';
                if (isAndroid) {
                    return {};
                }
                return {
                    'firefox': apps.firefox,
                    'chrome': apps.chrome,
                    'edge': apps.edge,
                    'brave': apps.brave,
                };
            }

            const validBrowsers = getBrowsers();
            const appName = validBrowsers[browserLaunchApp.trim().toLowerCase()];
            const openOptions = appName ? { app: { name: appName } } : {};

            console.log(`Launching in a browser: ${browserLaunchApp}...`);
            await open(browserLaunchUrl.toString(), openOptions);
        } catch (error) {
            console.error('Failed to launch the browser. Open the URL manually.', error);
        }
    }

    setWindowTitle('SillyTavern WebServer');

    let logListen = 'SillyTavern is listening on';

    if (result.useIPv6 && !result.v6Failed) {
        logListen += color.green(
            ' IPv6: ' + cliArgs.getIPv6ListenUrl().host,
        );
    }

    if (result.useIPv4 && !result.v4Failed) {
        logListen += color.green(
            ' IPv4: ' + cliArgs.getIPv4ListenUrl().host,
        );
    }

    const goToLog = `Go to: ${color.blue(browserLaunchUrl)} to open SillyTavern`;
    const plainGoToLog = removeColorFormatting(goToLog);

    console.log(logListen);
    if (cliArgs.listen) {
        console.log();
        console.log('To limit connections to internal localhost only ([::1] or 127.0.0.1), change the setting in config.yaml to "listen: false".');
        console.log('Check the "access.log" file in the data directory to inspect incoming connections:', color.green(getAccessLogPath()));
    }
    console.log('\n' + getSeparator(plainGoToLog.length) + '\n');
    console.log(goToLog);
    console.log('\n' + getSeparator(plainGoToLog.length) + '\n');

    setupLogLevel();
    serverEvents.emit(EVENT_NAMES.SERVER_STARTED, { url: browserLaunchUrl });
}
/**
 * Registers a not-found error response if a not-found error page exists. Should only be called after all other middlewares have been registered.
 */
function apply404Middleware() {
    const notFoundWebpage = safeReadFileSync(path.join(serverDirectory, 'public/error/url-not-found.html')) ?? '';
    app.use((req, res) => {
        res.status(404).send(notFoundWebpage);
    });
}

/**
 * Sets the DNS resolution order based on the command line arguments.
 */
function setDnsResolutionOrder() {
    try {
        if (cliArgs.dnsPreferIPv6) {
            dns.setDefaultResultOrder('ipv6first');
            console.log('Preferring IPv6 for DNS resolution');
        } else {
            dns.setDefaultResultOrder('ipv4first');
            console.log('Preferring IPv4 for DNS resolution');
        }
    } catch (error) {
        console.warn('Failed to set DNS resolution order. Possibly unsupported in this Node version.');
    }
}

// User storage module needs to be initialized before starting the server
initUserStorage(globalThis.DATA_ROOT)
    .then(setDnsResolutionOrder)
    .then(ensurePublicDirectoriesExist)
    .then(migrateUserData)
    .then(migrateSystemPrompts)
    .then(verifySecuritySettings)
    .then(preSetupTasks)
    .then(apply404Middleware)
    .then(() => new ServerStartup(app, cliArgs).start())
    .then(postSetupTasks);
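For reference, a browser-side sketch (not part of this commit) of how a client is expected to use the CSRF wiring set up above:

// Fetch the synchronized token once, then attach it to every
// state-changing request via the x-csrf-token header.
const { token } = await (await fetch('/csrf-token')).json();
await fetch('/api/ping', {
    method: 'POST',
    headers: { 'x-csrf-token': token },
});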
392
web-app/src/server-startup.js
Normal file
@@ -0,0 +1,392 @@
import https from 'node:https';
import http from 'node:http';
import fs from 'node:fs';
import { color, urlHostnameToIPv6, getHasIP } from './util.js';

// Express routers
import { router as userDataRouter } from './users.js';
import { router as usersPrivateRouter } from './endpoints/users-private.js';
import { router as usersAdminRouter } from './endpoints/users-admin.js';
import { router as movingUIRouter } from './endpoints/moving-ui.js';
import { router as imagesRouter } from './endpoints/images.js';
import { router as quickRepliesRouter } from './endpoints/quick-replies.js';
import { router as avatarsRouter } from './endpoints/avatars.js';
import { router as themesRouter } from './endpoints/themes.js';
import { router as openAiRouter } from './endpoints/openai.js';
import { router as googleRouter } from './endpoints/google.js';
import { router as anthropicRouter } from './endpoints/anthropic.js';
import { router as tokenizersRouter } from './endpoints/tokenizers.js';
import { router as presetsRouter } from './endpoints/presets.js';
import { router as secretsRouter } from './endpoints/secrets.js';
import { router as thumbnailRouter } from './endpoints/thumbnails.js';
import { router as novelAiRouter } from './endpoints/novelai.js';
import { router as extensionsRouter } from './endpoints/extensions.js';
import { router as assetsRouter } from './endpoints/assets.js';
import { router as filesRouter } from './endpoints/files.js';
import { router as charactersRouter } from './endpoints/characters.js';
import { router as chatsRouter } from './endpoints/chats.js';
import { router as groupsRouter } from './endpoints/groups.js';
import { router as worldInfoRouter } from './endpoints/worldinfo.js';
import { router as statsRouter } from './endpoints/stats.js';
import { router as contentManagerRouter } from './endpoints/content-manager.js';
import { router as settingsRouter } from './endpoints/settings.js';
import { router as backgroundsRouter } from './endpoints/backgrounds.js';
import { router as spritesRouter } from './endpoints/sprites.js';
import { router as stableDiffusionRouter } from './endpoints/stable-diffusion.js';
import { router as hordeRouter } from './endpoints/horde.js';
import { router as vectorsRouter } from './endpoints/vectors.js';
import { router as translateRouter } from './endpoints/translate.js';
import { router as classifyRouter } from './endpoints/classify.js';
import { router as captionRouter } from './endpoints/caption.js';
import { router as searchRouter } from './endpoints/search.js';
import { router as openRouterRouter } from './endpoints/openrouter.js';
import { router as chatCompletionsRouter } from './endpoints/backends/chat-completions.js';
import { router as koboldRouter } from './endpoints/backends/kobold.js';
import { router as textCompletionsRouter } from './endpoints/backends/text-completions.js';
import { router as speechRouter } from './endpoints/speech.js';
import { router as azureRouter } from './endpoints/azure.js';
import { router as minimaxRouter } from './endpoints/minimax.js';
import { router as dataMaidRouter } from './endpoints/data-maid.js';
import { router as backupsRouter } from './endpoints/backups.js';

/**
 * @typedef {object} ServerStartupResult
 * @property {boolean} v6Failed If the server failed to start on IPv6
 * @property {boolean} v4Failed If the server failed to start on IPv4
 * @property {boolean} useIPv6 Whether IPv6 is used
 * @property {boolean} useIPv4 Whether IPv4 is used
 */
/**
 * Redirect deprecated API endpoints to their replacements.
 * @param {import('express').Express} app The Express app to use
 */
export function redirectDeprecatedEndpoints(app) {
    /**
     * Redirect a deprecated API endpoint URL to its replacement. Because fetch, form submissions, and $.ajax follow
     * redirects, this is transparent to client-side code.
     * @param {string} src The URL to redirect from.
     * @param {string} destination The URL to redirect to.
     */
    function redirect(src, destination) {
        app.use(src, (req, res) => {
            console.warn(`API endpoint ${src} is deprecated; use ${destination} instead`);
            // HTTP 301 causes the request to become a GET. 308 preserves the request method.
            res.redirect(308, destination);
        });
    }

    redirect('/createcharacter', '/api/characters/create');
    redirect('/renamecharacter', '/api/characters/rename');
    redirect('/editcharacter', '/api/characters/edit');
    redirect('/editcharacterattribute', '/api/characters/edit-attribute');
    redirect('/v2/editcharacterattribute', '/api/characters/merge-attributes');
    redirect('/deletecharacter', '/api/characters/delete');
    redirect('/getcharacters', '/api/characters/all');
    redirect('/getonecharacter', '/api/characters/get');
    redirect('/getallchatsofcharacter', '/api/characters/chats');
    redirect('/importcharacter', '/api/characters/import');
    redirect('/dupecharacter', '/api/characters/duplicate');
    redirect('/exportcharacter', '/api/characters/export');
    redirect('/savechat', '/api/chats/save');
    redirect('/getchat', '/api/chats/get');
    redirect('/renamechat', '/api/chats/rename');
    redirect('/delchat', '/api/chats/delete');
    redirect('/exportchat', '/api/chats/export');
    redirect('/importgroupchat', '/api/chats/group/import');
    redirect('/importchat', '/api/chats/import');
    redirect('/getgroupchat', '/api/chats/group/get');
    redirect('/deletegroupchat', '/api/chats/group/delete');
    redirect('/savegroupchat', '/api/chats/group/save');
    redirect('/getgroups', '/api/groups/all');
    redirect('/creategroup', '/api/groups/create');
    redirect('/editgroup', '/api/groups/edit');
    redirect('/deletegroup', '/api/groups/delete');
    redirect('/getworldinfo', '/api/worldinfo/get');
    redirect('/deleteworldinfo', '/api/worldinfo/delete');
    redirect('/importworldinfo', '/api/worldinfo/import');
    redirect('/editworldinfo', '/api/worldinfo/edit');
    redirect('/getstats', '/api/stats/get');
    redirect('/recreatestats', '/api/stats/recreate');
    redirect('/updatestats', '/api/stats/update');
    redirect('/getbackgrounds', '/api/backgrounds/all');
    redirect('/delbackground', '/api/backgrounds/delete');
    redirect('/renamebackground', '/api/backgrounds/rename');
    redirect('/downloadbackground', '/api/backgrounds/upload'); // yes, the downloadbackground endpoint actually uploads one
    redirect('/savetheme', '/api/themes/save');
    redirect('/getuseravatars', '/api/avatars/get');
    redirect('/deleteuseravatar', '/api/avatars/delete');
    redirect('/uploaduseravatar', '/api/avatars/upload');
    redirect('/deletequickreply', '/api/quick-replies/delete');
    redirect('/savequickreply', '/api/quick-replies/save');
    redirect('/uploadimage', '/api/images/upload');
    redirect('/listimgfiles/:folder', '/api/images/list/:folder');
    redirect('/api/content/import', '/api/content/importURL');
    redirect('/savemovingui', '/api/moving-ui/save');
    redirect('/api/serpapi/search', '/api/search/serpapi');
    redirect('/api/serpapi/visit', '/api/search/visit');
    redirect('/api/serpapi/transcript', '/api/search/transcript');
}
/**
 * Setup the routers for the endpoints.
 * @param {import('express').Express} app The Express app to use
 */
export function setupPrivateEndpoints(app) {
    app.use('/', userDataRouter);
    app.use('/api/users', usersPrivateRouter);
    app.use('/api/users', usersAdminRouter);
    app.use('/api/moving-ui', movingUIRouter);
    app.use('/api/images', imagesRouter);
    app.use('/api/quick-replies', quickRepliesRouter);
    app.use('/api/avatars', avatarsRouter);
    app.use('/api/themes', themesRouter);
    app.use('/api/openai', openAiRouter);
    app.use('/api/google', googleRouter);
    app.use('/api/anthropic', anthropicRouter);
    app.use('/api/tokenizers', tokenizersRouter);
    app.use('/api/presets', presetsRouter);
    app.use('/api/secrets', secretsRouter);
    app.use('/thumbnail', thumbnailRouter);
    app.use('/api/novelai', novelAiRouter);
    app.use('/api/extensions', extensionsRouter);
    app.use('/api/assets', assetsRouter);
    app.use('/api/files', filesRouter);
    app.use('/api/characters', charactersRouter);
    app.use('/api/chats', chatsRouter);
    app.use('/api/groups', groupsRouter);
    app.use('/api/worldinfo', worldInfoRouter);
    app.use('/api/stats', statsRouter);
    app.use('/api/backgrounds', backgroundsRouter);
    app.use('/api/sprites', spritesRouter);
    app.use('/api/content', contentManagerRouter);
    app.use('/api/settings', settingsRouter);
    app.use('/api/sd', stableDiffusionRouter);
    app.use('/api/horde', hordeRouter);
    app.use('/api/vector', vectorsRouter);
    app.use('/api/translate', translateRouter);
    app.use('/api/extra/classify', classifyRouter);
    app.use('/api/extra/caption', captionRouter);
    app.use('/api/search', searchRouter);
    app.use('/api/backends/text-completions', textCompletionsRouter);
    app.use('/api/openrouter', openRouterRouter);
    app.use('/api/backends/kobold', koboldRouter);
    app.use('/api/backends/chat-completions', chatCompletionsRouter);
    app.use('/api/speech', speechRouter);
    app.use('/api/azure', azureRouter);
    app.use('/api/minimax', minimaxRouter);
    app.use('/api/data-maid', dataMaidRouter);
    app.use('/api/backups', backupsRouter);
}
/**
 * Utilities for starting the express server.
 */
export class ServerStartup {
    /**
     * Creates a new ServerStartup instance.
     * @param {import('express').Express} app The Express app to use
     * @param {import('./command-line.js').CommandLineArguments} cliArgs The command-line arguments
     */
    constructor(app, cliArgs) {
        this.app = app;
        this.cliArgs = cliArgs;
    }

    /**
     * Prints a fatal error message and exits the process.
     * @param {string} message
     */
    #fatal(message) {
        console.error(color.red(message));
        process.exit(1);
    }

    /**
     * Checks if SSL options are valid. If not, it will print an error message and exit the process.
     * @returns {void}
     */
    #verifySslOptions() {
        if (!this.cliArgs.ssl) return;

        if (!this.cliArgs.certPath) {
            this.#fatal('Error: SSL certificate path is required when using HTTPS. Check your config');
        }

        if (!this.cliArgs.keyPath) {
            this.#fatal('Error: SSL key path is required when using HTTPS. Check your config');
        }

        if (!fs.existsSync(this.cliArgs.certPath)) {
            this.#fatal('Error: SSL certificate path does not exist');
        }

        if (!fs.existsSync(this.cliArgs.keyPath)) {
            this.#fatal('Error: SSL key path does not exist');
        }
    }

    /**
     * Creates an HTTPS server.
     * @param {URL} url The URL to listen on
     * @param {number} ipVersion The IP version to use
     * @returns {Promise<void>} A promise that resolves when the server is listening
     */
    #createHttpsServer(url, ipVersion) {
        this.#verifySslOptions();
        return new Promise((resolve, reject) => {
            /** @type {import('https').ServerOptions} */
            const sslOptions = {
                cert: fs.readFileSync(this.cliArgs.certPath),
                key: fs.readFileSync(this.cliArgs.keyPath),
                passphrase: String(this.cliArgs.keyPassphrase ?? ''),
            };
            const server = https.createServer(sslOptions, this.app);
            server.on('error', reject);
            server.on('listening', resolve);

            let host = url.hostname;
            if (ipVersion === 6) host = urlHostnameToIPv6(url.hostname);
            server.listen({
                host: host,
                port: Number(url.port || 443),
                // see https://nodejs.org/api/net.html#serverlisten for why ipv6Only is used
                ipv6Only: true,
            });
        });
    }

    /**
     * Creates an HTTP server.
     * @param {URL} url The URL to listen on
     * @param {number} ipVersion The IP version to use
     * @returns {Promise<void>} A promise that resolves when the server is listening
     */
    #createHttpServer(url, ipVersion) {
        return new Promise((resolve, reject) => {
            const server = http.createServer(this.app);
            server.on('error', reject);
            server.on('listening', resolve);

            let host = url.hostname;
            if (ipVersion === 6) host = urlHostnameToIPv6(url.hostname);
            server.listen({
                host: host,
                port: Number(url.port || 80),
                // see https://nodejs.org/api/net.html#serverlisten for why ipv6Only is used
                ipv6Only: true,
            });
        });
    }

    /**
     * Starts the server using HTTP or HTTPS depending on the config.
     * @param {boolean} useIPv6 Whether to listen on IPv6
     * @param {boolean} useIPv4 Whether to listen on IPv4
     * @returns {Promise<[boolean, boolean]>} A promise that resolves with an array of booleans indicating if the server failed to start on IPv6 and IPv4, respectively
     */
    async #startHTTPorHTTPS(useIPv6, useIPv4) {
        let v6Failed = false;
        let v4Failed = false;

        const createFunc = this.cliArgs.ssl ? this.#createHttpsServer.bind(this) : this.#createHttpServer.bind(this);

        if (useIPv6) {
            try {
                await createFunc(this.cliArgs.getIPv6ListenUrl(), 6);
            } catch (error) {
                console.error('Warning: failed to start server on IPv6');
                console.error(error);

                v6Failed = true;
            }
        }

        if (useIPv4) {
            try {
                await createFunc(this.cliArgs.getIPv4ListenUrl(), 4);
            } catch (error) {
                console.error('Warning: failed to start server on IPv4');
                console.error(error);

                v4Failed = true;
            }
        }

        return [v6Failed, v4Failed];
    }

    /**
     * Handles the case where the server failed to start on one or both protocols.
     * @param {ServerStartupResult} result The results of the server startup
     * @returns {void}
     */
    #handleServerListenFail({ v6Failed, v4Failed, useIPv6, useIPv4 }) {
        if (v6Failed && !useIPv4) {
            this.#fatal('Error: Failed to start server on IPv6, and IPv4 is disabled');
        }

        if (v4Failed && !useIPv6) {
            this.#fatal('Error: Failed to start server on IPv4, and IPv6 is disabled');
        }

        if (v6Failed && v4Failed) {
            this.#fatal('Error: Failed to start server on both IPv6 and IPv4');
        }
    }

    /**
     * Performs the server startup.
     * @returns {Promise<ServerStartupResult>} A promise that resolves with an object containing the results of the server startup
     */
    async start() {
        let useIPv6 = (this.cliArgs.enableIPv6 === true);
        let useIPv4 = (this.cliArgs.enableIPv4 === true);

        if (this.cliArgs.enableIPv6 === 'auto' || this.cliArgs.enableIPv4 === 'auto') {
            const ipQuery = await getHasIP();
            let hasIPv6 = false, hasIPv4 = false;

            hasIPv6 = this.cliArgs.listen ? ipQuery.hasIPv6Any : ipQuery.hasIPv6Local;
            if (this.cliArgs.enableIPv6 === 'auto') {
                useIPv6 = hasIPv6;
            }
            if (hasIPv6) {
                if (useIPv6) {
                    console.log(color.green('IPv6 support detected'));
                } else {
                    console.log('IPv6 support detected (but disabled)');
                }
            }

            hasIPv4 = this.cliArgs.listen ? ipQuery.hasIPv4Any : ipQuery.hasIPv4Local;
            if (this.cliArgs.enableIPv4 === 'auto') {
                useIPv4 = hasIPv4;
            }
            if (hasIPv4) {
                if (useIPv4) {
                    console.log(color.green('IPv4 support detected'));
                } else {
                    console.log('IPv4 support detected (but disabled)');
                }
            }

            if (this.cliArgs.enableIPv6 === 'auto' && this.cliArgs.enableIPv4 === 'auto') {
                if (!hasIPv6 && !hasIPv4) {
                    console.error('Neither IPv6 nor IPv4 support was detected');
                    process.exit(1);
                }
            }
        }

        if (!useIPv6 && !useIPv4) {
            console.error('Both IPv6 and IPv4 are disabled or not detected');
            process.exit(1);
        }

        const [v6Failed, v4Failed] = await this.#startHTTPorHTTPS(useIPv6, useIPv4);
        const result = { v6Failed, v4Failed, useIPv6, useIPv4 };
        this.#handleServerListenFail(result);
        return result;
    }
}
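A standalone usage sketch; `cliArgs` is assumed to be the parsed CommandLineArguments object that server-main.js reads from globalThis:

import express from 'express';
import { ServerStartup } from './server-startup.js';

const app = express();
const result = await new ServerStartup(app, cliArgs).start();
console.log('IPv6 ok:', result.useIPv6 && !result.v6Failed);
console.log('IPv4 ok:', result.useIPv4 && !result.v4Failed);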
1
web-app/src/tokenizers/claude.json
Normal file
File diff suppressed because one or more lines are too long
BIN
web-app/src/tokenizers/gemma.model
Normal file
Binary file not shown.
BIN
web-app/src/tokenizers/jamba.model
Normal file
Binary file not shown.
BIN
web-app/src/tokenizers/llama.model
Normal file
Binary file not shown.
1
web-app/src/tokenizers/llama3.json
Normal file
File diff suppressed because one or more lines are too long
BIN
web-app/src/tokenizers/mistral.model
Normal file
Binary file not shown.
BIN
web-app/src/tokenizers/nerdstash.model
Normal file
Binary file not shown.
BIN
web-app/src/tokenizers/nerdstash_v2.model
Normal file
Binary file not shown.
BIN
web-app/src/tokenizers/yi.model
Normal file
Binary file not shown.
148
web-app/src/transformers.js
Normal file
@@ -0,0 +1,148 @@
import path from 'node:path';
import fs from 'node:fs';
import process from 'node:process';
import { Buffer } from 'node:buffer';

import { pipeline, env, RawImage } from 'sillytavern-transformers';
import { getConfigValue } from './util.js';
import { serverDirectory } from './server-directory.js';

configureTransformers();

function configureTransformers() {
    // Limit the number of threads to 1 to avoid issues on Android
    env.backends.onnx.wasm.numThreads = 1;
    // Use WASM from a local folder to avoid CDN connections
    env.backends.onnx.wasm.wasmPaths = path.join(serverDirectory, 'node_modules', 'sillytavern-transformers', 'dist') + path.sep;
}

const tasks = {
    'text-classification': {
        defaultModel: 'Cohee/distilbert-base-uncased-go-emotions-onnx',
        pipeline: null,
        configField: 'extensions.models.classification',
        quantized: true,
    },
    'image-to-text': {
        defaultModel: 'Xenova/vit-gpt2-image-captioning',
        pipeline: null,
        configField: 'extensions.models.captioning',
        quantized: true,
    },
    'feature-extraction': {
        defaultModel: 'Xenova/all-mpnet-base-v2',
        pipeline: null,
        configField: 'extensions.models.embedding',
        quantized: true,
    },
    'automatic-speech-recognition': {
        defaultModel: 'Xenova/whisper-small',
        pipeline: null,
        configField: 'extensions.models.speechToText',
        quantized: true,
    },
    'text-to-speech': {
        defaultModel: 'Xenova/speecht5_tts',
        pipeline: null,
        configField: 'extensions.models.textToSpeech',
        quantized: false,
    },
};
/**
 * Gets a RawImage object from a base64-encoded image.
 * @param {string} image Base64-encoded image
 * @returns {Promise<RawImage|null>} Object representing the image
 */
export async function getRawImage(image) {
    try {
        const buffer = Buffer.from(image, 'base64');
        const byteArray = new Uint8Array(buffer);
        const blob = new Blob([byteArray]);

        const rawImage = await RawImage.fromBlob(blob);
        return rawImage;
    } catch {
        return null;
    }
}

/**
 * Gets the model to use for a given transformers.js task.
 * @param {string} task The task to get the model for
 * @returns {string} The model to use for the given task
 */
function getModelForTask(task) {
    const defaultModel = tasks[task].defaultModel;

    try {
        const model = getConfigValue(tasks[task].configField, null);
        return model || defaultModel;
    } catch (error) {
        console.warn('Failed to read config.yaml, using the default model.');
        return defaultModel;
    }
}

async function migrateCacheToDataDir() {
    const oldCacheDir = path.join(process.cwd(), 'cache');
    const newCacheDir = path.join(globalThis.DATA_ROOT, '_cache');

    if (!fs.existsSync(newCacheDir)) {
        fs.mkdirSync(newCacheDir, { recursive: true });
    }

    if (fs.existsSync(oldCacheDir) && fs.statSync(oldCacheDir).isDirectory()) {
        const files = fs.readdirSync(oldCacheDir);

        if (files.length === 0) {
            return;
        }

        console.log('Migrating model cache files to data directory. Please wait...');

        for (const file of files) {
            try {
                const oldPath = path.join(oldCacheDir, file);
                const newPath = path.join(newCacheDir, file);
                fs.cpSync(oldPath, newPath, { recursive: true, force: true });
                fs.rmSync(oldPath, { recursive: true, force: true });
            } catch (error) {
                console.warn('Failed to migrate cache file. The model will be re-downloaded.', error);
            }
        }
    }
}

/**
 * Gets the transformers.js pipeline for a given task.
 * @param {import('sillytavern-transformers').PipelineType} task The task to get the pipeline for
 * @param {string} forceModel The model to use for the pipeline, if any
 * @returns {Promise<import('sillytavern-transformers').Pipeline>} The transformers.js pipeline
 */
export async function getPipeline(task, forceModel = '') {
    await migrateCacheToDataDir();

    if (tasks[task].pipeline) {
        if (forceModel === '' || tasks[task].currentModel === forceModel) {
            return tasks[task].pipeline;
        }
        console.log('Disposing transformers.js pipeline for task', task, 'with model', tasks[task].currentModel);
        await tasks[task].pipeline.dispose();
    }

    const cacheDir = path.join(globalThis.DATA_ROOT, '_cache');
    const model = forceModel || getModelForTask(task);
    const localOnly = !getConfigValue('extensions.models.autoDownload', true, 'boolean');
    console.log('Initializing transformers.js pipeline for task', task, 'with model', model);
    const instance = await pipeline(task, model, { cache_dir: cacheDir, quantized: tasks[task].quantized ?? true, local_files_only: localOnly });
    tasks[task].pipeline = instance;
    tasks[task].currentModel = model;
    // @ts-ignore
    return instance;
}

export default {
    getRawImage,
    getPipeline,
};
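A minimal sketch of fetching and running a pipeline; the exact output shape depends on the model, so the trailing comment is indicative only:

import { getPipeline } from './transformers.js';

// Classify a message with the configured (or default) emotion model.
const classify = await getPipeline('text-classification');
const scores = await classify('I am so happy today!');
console.log(scores); // e.g. [{ label: 'joy', score: 0.98 }, ...]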
91
web-app/src/types/byaf.d.ts
vendored
Normal file
@@ -0,0 +1,91 @@
type ByafLoreItem = {
    key: string;
    value: string;
};

type ByafCharacterImage = {
    path: string;
    label: string;
};

type ByafExampleMessage = {
    characterID: string;
    text: string;
};

type ByafCharacter = {
    schemaVersion: 1;
    id: string;
    name: string;
    displayName: string;
    isNSFW: boolean;
    persona: string;
    createdAt: string;
    updatedAt: string;
    loreItems: Array<ByafLoreItem>;
    images: Array<ByafCharacterImage>;
};

type ByafManifest = {
    schemaVersion: 1;
    createdAt: string;
    characters: string[];
    scenarios: string[];
    author?: {
        name: string;
        backyardURL: string;
    };
};

type ByafAiMessage = {
    type: "ai";
    outputs: Array<{
        createdAt: string;
        updatedAt: string;
        text: string;
        activeTimestamp: string;
    }>;
};

type ByafHumanMessage = {
    type: "human";
    createdAt: string;
    updatedAt: string;
    text: string;
};

type ByafScenario = {
    schemaVersion: 1;
    title?: string;
    model?: string;
    formattingInstructions: string;
    minP: number;
    minPEnabled: boolean;
    temperature: number;
    repeatPenalty: number;
    repeatLastN: number;
    topK: number;
    topP: number;
    exampleMessages: Array<ByafExampleMessage>;
    canDeleteExampleMessages: boolean;
    firstMessages: Array<ByafExampleMessage>;
    narrative: string;
    promptTemplate: "general" | "ChatML" | "Llama3" | "Gemma2" | "CommandR" | "MistralInstruct" | null;
    grammar: string | null;
    messages: Array<ByafAiMessage | ByafHumanMessage>;
    backgroundImage?: string;
};

type ByafChatBackground = {
    name: string;
    data: Buffer;
    paths: string[];
};

type ByafParseResult = {
    card: TavernCardV2,
    images: { filename: string, image: Buffer, label: string }[],
    scenarios: Partial<ByafScenario>[],
    chatBackgrounds: Array<ByafChatBackground>,
    character: ByafCharacter
};
52
web-app/src/types/spec-v2.d.ts
vendored
Normal file
@@ -0,0 +1,52 @@
type TavernCardV2 = {
    spec: 'chara_card_v2';
    spec_version: '2.0';
    data: {
        name: string;
        description: string;
        personality: string;
        scenario: string;
        first_mes: string;
        mes_example: string;

        creator_notes: string;
        system_prompt: string;
        post_history_instructions: string;
        alternate_greetings: Array<string>;
        character_book?: CharacterBook;

        tags: Array<string>;
        creator: string;
        character_version: string;
        extensions: Record<string, any>;
    }
}

type CharacterBook = {
    name?: string;
    description?: string;
    scan_depth?: number;
    token_budget?: number;
    recursive_scanning?: boolean;
    extensions: Record<string, any>;
    entries: Array<CharacterBookEntry>;
}

type CharacterBookEntry = {
    keys: Array<string>;
    content: string;
    extensions: Record<string, any>;
    enabled: boolean;
    insertion_order: number;
    case_sensitive?: boolean;

    name?: string;
    priority?: number;

    id?: number;
    comment?: string;
    selective?: boolean;
    secondary_keys?: Array<string>;
    constant?: boolean;
    position?: 'before_char' | 'after_char';
};
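To make the shape concrete, a minimal card literal that satisfies TavernCardV2 (all values are placeholders):

/** @type {TavernCardV2} */
const exampleCard = {
    spec: 'chara_card_v2',
    spec_version: '2.0',
    data: {
        name: 'Example',
        description: 'A minimal example character.',
        personality: '',
        scenario: '',
        first_mes: 'Hello!',
        mes_example: '',
        creator_notes: '',
        system_prompt: '',
        post_history_instructions: '',
        alternate_greetings: [],
        tags: [],
        creator: '',
        character_version: '1.0',
        extensions: {},
    },
};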
1082
web-app/src/users.js
Normal file
File diff suppressed because it is too large
1559
web-app/src/util.js
Normal file
File diff suppressed because it is too large
169
web-app/src/validator/TavernCardValidator.js
Normal file
@@ -0,0 +1,169 @@
/**
 * Validates the data structure of character cards.
 * Supported specs: V1, V2, V3
 * Up to: 8083fb3
 *
 * @link https://github.com/malfoyslastname/character-card-spec-v2
 */
export class TavernCardValidator {
    /**
     * @type {string|null}
     */
    #lastValidationError = null;

    constructor(card) {
        this.card = card;
    }

    /**
     * Field that caused the validation to fail
     *
     * @returns {null|string}
     */
    get lastValidationError() {
        return this.#lastValidationError;
    }

    /**
     * Validate against the V1, V2, or V3 spec.
     *
     * @returns {number|boolean} - false when no supported spec was matched. Specification version number otherwise.
     */
    validate() {
        this.#lastValidationError = null;

        if (this.validateV1()) {
            return 1;
        }

        if (this.validateV2()) {
            return 2;
        }

        if (this.validateV3()) {
            return 3;
        }

        return false;
    }

    /**
     * Validate against the V1 specification
     *
     * @returns {boolean}
     */
    validateV1() {
        const requiredFields = ['name', 'description', 'personality', 'scenario', 'first_mes', 'mes_example'];
        return requiredFields.every(field => {
            if (!Object.hasOwn(this.card, field)) {
                this.#lastValidationError = field;
                return false;
            }
            return true;
        });
    }

    /**
     * Validate against the V2 specification
     *
     * @returns {boolean}
     */
    validateV2() {
        return this.#validateSpecV2()
            && this.#validateSpecVersionV2()
            && this.#validateDataV2()
            && this.#validateCharacterBookV2();
    }

    /**
     * Validate against the V3 specification
     * @returns {boolean}
     */
    validateV3() {
        return this.#validateSpecV3()
            && this.#validateSpecVersionV3()
            && this.#validateDataV3();
    }

    #validateSpecV2() {
        if (this.card.spec !== 'chara_card_v2') {
            this.#lastValidationError = 'spec';
            return false;
        }
        return true;
    }

    #validateSpecVersionV2() {
        if (this.card.spec_version !== '2.0') {
            this.#lastValidationError = 'spec_version';
            return false;
        }
        return true;
    }

    #validateDataV2() {
        const data = this.card.data;

        if (!data) {
            this.#lastValidationError = 'No tavern card data found';
            return false;
        }

        const requiredFields = ['name', 'description', 'personality', 'scenario', 'first_mes', 'mes_example', 'creator_notes', 'system_prompt', 'post_history_instructions', 'alternate_greetings', 'tags', 'creator', 'character_version', 'extensions'];
        const isAllRequiredFieldsPresent = requiredFields.every(field => {
            if (!Object.hasOwn(data, field)) {
                this.#lastValidationError = `data.${field}`;
                return false;
            }
            return true;
        });

        return isAllRequiredFieldsPresent && Array.isArray(data.alternate_greetings) && Array.isArray(data.tags) && typeof data.extensions === 'object';
    }

    #validateCharacterBookV2() {
        const characterBook = this.card.data.character_book;

        if (!characterBook) {
            return true;
        }

        const requiredFields = ['extensions', 'entries'];
        const isAllRequiredFieldsPresent = requiredFields.every(field => {
            if (!Object.hasOwn(characterBook, field)) {
                this.#lastValidationError = `data.character_book.${field}`;
                return false;
            }
            return true;
        });

        return isAllRequiredFieldsPresent && Array.isArray(characterBook.entries) && typeof characterBook.extensions === 'object';
    }

    #validateSpecV3() {
        if (this.card.spec !== 'chara_card_v3') {
            this.#lastValidationError = 'spec';
            return false;
        }
        return true;
    }

    #validateSpecVersionV3() {
        if (Number(this.card.spec_version) < 3.0 || Number(this.card.spec_version) >= 4.0) {
            this.#lastValidationError = 'spec_version';
            return false;
        }
        return true;
    }

    #validateDataV3() {
        const data = this.card.data;

        if (!data || typeof data !== 'object') {
            this.#lastValidationError = 'No tavern card data found';
            return false;
        }

        return true;
    }
}
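A usage sketch; `card` stands in for a parsed character card object:

import { TavernCardValidator } from './validator/TavernCardValidator.js';

const validator = new TavernCardValidator(card);
const spec = validator.validate(); // 1, 2, 3, or false
if (spec === false) {
    console.warn('Invalid card; offending field:', validator.lastValidationError);
}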
63
web-app/src/vectors/cohere-vectors.js
Normal file
@@ -0,0 +1,63 @@
import fetch from 'node-fetch';
import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';

/**
 * Gets the vectors for the given text batch from the Cohere API.
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {boolean} isQuery - If the text is a query for embedding search
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @param {string} model - The model to use for the embedding
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getCohereBatchVector(texts, isQuery, directories, model) {
    const key = readSecret(directories, SECRET_KEYS.COHERE);

    if (!key) {
        console.warn('No API key found');
        throw new Error('No API key found');
    }

    const response = await fetch('https://api.cohere.ai/v2/embed', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${key}`,
        },
        body: JSON.stringify({
            texts: texts,
            model: model,
            embedding_types: ['float'],
            input_type: isQuery ? 'search_query' : 'search_document',
            truncate: 'END',
        }),
    });

    if (!response.ok) {
        const text = await response.text();
        console.warn('API request failed', response.statusText, text);
        throw new Error('API request failed');
    }

    /** @type {any} */
    const data = await response.json();
    if (!Array.isArray(data?.embeddings?.float)) {
        console.warn('API response was not an array');
        throw new Error('API response was not an array');
    }

    return data.embeddings.float;
}

/**
 * Gets the vector for the given text from the Cohere API.
 * @param {string} text - The text to get the vector for
 * @param {boolean} isQuery - If the text is a query for embedding search
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @param {string} model - The model to use for the embedding
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getCohereVector(text, isQuery, directories, model) {
    const vectors = await getCohereBatchVector([text], isQuery, directories, model);
    return vectors[0];
}
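
// Example usage (illustrative sketch, not part of the module; assumes a Cohere key is
// stored in secrets and that 'embed-english-v3.0' is an available embedding model):
//   const vector = await getCohereVector('hello world', true, directories, 'embed-english-v3.0');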
27
web-app/src/vectors/embedding.js
Normal file
@@ -0,0 +1,27 @@
import { getPipeline } from '../transformers.js';
const TASK = 'feature-extraction';

/**
 * Gets the vectorized text in the form of an array of numbers.
 * @param {string} text - The text to vectorize
 * @returns {Promise<number[]>} - The vectorized text in the form of an array of numbers
 */
export async function getTransformersVector(text) {
    const pipe = await getPipeline(TASK);
    const result = await pipe(text, { pooling: 'mean', normalize: true });
    const vector = Array.from(result.data);
    return vector;
}

/**
 * Gets the vectorized texts in the form of an array of arrays of numbers.
 * @param {string[]} texts - The texts to vectorize
 * @returns {Promise<number[][]>} - The vectorized texts in the form of an array of arrays of numbers
 */
export async function getTransformersBatchVector(texts) {
    const result = [];
    for (const text of texts) {
        result.push(await getTransformersVector(text));
    }
    return result;
}
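
// Example usage (illustrative sketch, not part of the module; assumes the local
// feature-extraction pipeline is configured in transformers.js):
//   const vector = await getTransformersVector('hello world');   // mean-pooled, normalized number[]
//   const vectors = await getTransformersBatchVector(['a', 'b']); // number[][]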
74
web-app/src/vectors/extras-vectors.js
Normal file
@@ -0,0 +1,74 @@
import fetch from 'node-fetch';

/**
 * Gets the vectors for the given texts from SillyTavern-extras
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} apiUrl - The Extras API URL
 * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getExtrasBatchVector(texts, apiUrl, apiKey) {
    return getExtrasVectorImpl(texts, apiUrl, apiKey);
}

/**
 * Gets the vector for the given text from SillyTavern-extras
 * @param {string} text - The text to get the vector for
 * @param {string} apiUrl - The Extras API URL
 * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getExtrasVector(text, apiUrl, apiKey) {
    return getExtrasVectorImpl(text, apiUrl, apiKey);
}

/**
 * Gets the vector(s) for the given text or texts from SillyTavern-extras
 * @param {string|string[]} text - The text or texts to get the vector(s) for
 * @param {string} apiUrl - The Extras API URL
 * @param {string} apiKey - The Extras API key, or empty string if API key not enabled
 * @returns {Promise<Array>} - The vector for a single text if input is string, or the array of vectors for multiple texts if input is string[]
 */
async function getExtrasVectorImpl(text, apiUrl, apiKey) {
    let url;
    try {
        url = new URL(apiUrl);
        url.pathname = '/api/embeddings/compute';
    }
    catch (error) {
        console.error('Failed to set up Extras API call:', error);
        console.debug('Extras API URL given was:', apiUrl);
        throw error;
    }

    const headers = {
        'Content-Type': 'application/json',
    };

    // Include the Extras API key, if enabled
    if (apiKey && apiKey.length > 0) {
        Object.assign(headers, {
            'Authorization': `Bearer ${apiKey}`,
        });
    }

    const response = await fetch(url, {
        method: 'POST',
        headers: headers,
        body: JSON.stringify({
            text: text, // The backend accepts {string|string[]} for one or multiple text items, respectively.
        }),
    });

    if (!response.ok) {
        const errorText = await response.text();
        console.warn('Extras request failed', response.statusText, errorText);
        throw new Error('Extras request failed');
    }

    /** @type {any} */
    const data = await response.json();
    const vector = data.embedding; // `embedding`: number[] (one text item), or number[][] (multiple text items).

    return vector;
}
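
// Example usage (illustrative sketch, not part of the module; assumes a running
// SillyTavern-extras server at the given URL, with the API key feature disabled):
//   const vector = await getExtrasVector('hello world', 'http://localhost:5100', '');
//   const vectors = await getExtrasBatchVector(['a', 'b'], 'http://localhost:5100', '');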
101
web-app/src/vectors/google-vectors.js
Normal file
@@ -0,0 +1,101 @@
import fetch from 'node-fetch';
import { getGoogleApiConfig } from '../endpoints/google.js';

/**
 * Gets the vectors for the given texts from Google AI Studio
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} model - The model to use for embedding
 * @param {import('express').Request} request - The request object to get API key and URL
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getMakerSuiteBatchVector(texts, model, request) {
    const { url, headers, apiName } = await getGoogleApiConfig(request, model, 'batchEmbedContents');

    const body = {
        requests: texts.map(text => ({
            model: `models/${model}`,
            content: { parts: [{ text }] },
        })),
    };

    const response = await fetch(url, {
        body: JSON.stringify(body),
        method: 'POST',
        headers: headers,
    });

    if (!response.ok) {
        const text = await response.text();
        console.warn(`${apiName} batch request failed`, response.statusText, text);
        throw new Error(`${apiName} batch request failed`);
    }

    /** @type {any} */
    const data = await response.json();
    if (!Array.isArray(data?.embeddings)) {
        throw new Error(`${apiName} did not return an array`);
    }

    const embeddings = data.embeddings.map(embedding => embedding.values);
    return embeddings;
}

/**
 * Gets the vectors for the given texts from Google Vertex AI
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} model - The model to use for embedding
 * @param {import('express').Request} request - The request object to get API key and URL
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getVertexBatchVector(texts, model, request) {
    const { url, headers, apiName } = await getGoogleApiConfig(request, model, 'predict');

    const body = {
        instances: texts.map(text => ({ content: text })),
    };

    const response = await fetch(url, {
        body: JSON.stringify(body),
        method: 'POST',
        headers: headers,
    });

    if (!response.ok) {
        const text = await response.text();
        console.warn(`${apiName} batch request failed`, response.statusText, text);
        throw new Error(`${apiName} batch request failed`);
    }

    /** @type {any} */
    const data = await response.json();
    if (!Array.isArray(data?.predictions)) {
        throw new Error(`${apiName} did not return an array`);
    }

    const embeddings = data.predictions.map(p => p.embeddings.values);
    return embeddings;
}

/**
 * Gets the vector for the given text from Google AI Studio
 * @param {string} text - The text to get the vector for
 * @param {string} model - The model to use for embedding
 * @param {import('express').Request} request - The request object to get API key and URL
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getMakerSuiteVector(text, model, request) {
    const [embedding] = await getMakerSuiteBatchVector([text], model, request);
    return embedding;
}

/**
 * Gets the vector for the given text from Google Vertex AI
 * @param {string} text - The text to get the vector for
 * @param {string} model - The model to use for embedding
 * @param {import('express').Request} request - The request object to get API key and URL
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getVertexVector(text, model, request) {
    const [embedding] = await getVertexBatchVector([text], model, request);
    return embedding;
}
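
// Example usage (illustrative sketch, not part of the module; assumes the request
// carries valid Google credentials and that 'text-embedding-004' is an available model):
//   const vector = await getMakerSuiteVector('hello world', 'text-embedding-004', request);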
57
web-app/src/vectors/llamacpp-vectors.js
Normal file
@@ -0,0 +1,57 @@
import fetch from 'node-fetch';
import { setAdditionalHeadersByType } from '../additional-headers.js';
import { TEXTGEN_TYPES } from '../constants.js';

/**
 * Gets the vectors for the given texts from LlamaCpp
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} apiUrl - The API URL
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getLlamaCppBatchVector(texts, apiUrl, directories) {
    const url = new URL(apiUrl);
    url.pathname = '/v1/embeddings';

    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.LLAMACPP, apiUrl, directories);

    const response = await fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            ...headers,
        },
        body: JSON.stringify({ input: texts }),
    });

    if (!response.ok) {
        const responseText = await response.text();
        throw new Error(`LlamaCpp: Failed to get vector for text: ${response.statusText} ${responseText}`);
    }

    /** @type {any} */
    const data = await response.json();

    if (!Array.isArray(data?.data)) {
        throw new Error('API response was not an array');
    }

    // Sort data by x.index to ensure the order is correct
    data.data.sort((a, b) => a.index - b.index);

    const vectors = data.data.map(x => x.embedding);
    return vectors;
}

/**
 * Gets the vector for the given text from LlamaCpp
 * @param {string} text - The text to get the vector for
 * @param {string} apiUrl - The API URL
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getLlamaCppVector(text, apiUrl, directories) {
    const vectors = await getLlamaCppBatchVector([text], apiUrl, directories);
    return vectors[0];
}
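
// Example usage (illustrative sketch, not part of the module; assumes a llama.cpp
// server with embeddings enabled is reachable at the given URL):
//   const vector = await getLlamaCppVector('hello world', 'http://127.0.0.1:8080', directories);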
74
web-app/src/vectors/nomicai-vectors.js
Normal file
@@ -0,0 +1,74 @@
import fetch from 'node-fetch';
import { SECRET_KEYS, readSecret } from '../endpoints/secrets.js';

const SOURCES = {
    'nomicai': {
        secretKey: SECRET_KEYS.NOMICAI,
        url: 'api-atlas.nomic.ai/v1/embedding/text',
        model: 'nomic-embed-text-v1.5',
    },
};

/**
 * Gets the vectors for the given text batch from the NomicAI API.
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} source - The source of the vector
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getNomicAIBatchVector(texts, source, directories) {
    const config = SOURCES[source];

    if (!config) {
        console.error('Unknown source', source);
        throw new Error('Unknown source');
    }

    const key = readSecret(directories, config.secretKey);

    if (!key) {
        console.warn('No API key found');
        throw new Error('No API key found');
    }

    const response = await fetch(`https://${config.url}`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${key}`,
        },
        body: JSON.stringify({
            texts: texts,
            model: config.model,
        }),
    });

    if (!response.ok) {
        const text = await response.text();
        console.warn('API request failed', response.statusText, text);
        throw new Error('API request failed');
    }

    /** @type {any} */
    const data = await response.json();
    if (!Array.isArray(data?.embeddings)) {
        console.warn('API response was not an array');
        throw new Error('API response was not an array');
    }

    return data.embeddings;
}

/**
 * Gets the vector for the given text from the NomicAI API.
 * @param {string} text - The text to get the vector for
 * @param {string} source - The source of the vector
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getNomicAIVector(text, source, directories) {
    const vectors = await getNomicAIBatchVector([text], source, directories);
    return vectors[0];
}
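
// Example usage (illustrative sketch, not part of the module; assumes a NomicAI
// key is stored in secrets; 'nomicai' is the only source defined in SOURCES above):
//   const vector = await getNomicAIVector('hello world', 'nomicai', directories);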
66
web-app/src/vectors/ollama-vectors.js
Normal file
@@ -0,0 +1,66 @@
import fetch from 'node-fetch';
import { setAdditionalHeadersByType } from '../additional-headers.js';
import { TEXTGEN_TYPES } from '../constants.js';

/**
 * Gets the vectors for the given texts from Ollama
 * @param {string[]} texts - The array of texts to get the vectors for
 * @param {string} apiUrl - The API URL
 * @param {string} model - The model to use
 * @param {boolean} keep - Keep the model loaded in memory
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[][]>} - The array of vectors for the texts
 */
export async function getOllamaBatchVector(texts, apiUrl, model, keep, directories) {
    const result = [];
    for (const text of texts) {
        const vector = await getOllamaVector(text, apiUrl, model, keep, directories);
        result.push(vector);
    }
    return result;
}

/**
 * Gets the vector for the given text from Ollama
 * @param {string} text - The text to get the vector for
 * @param {string} apiUrl - The API URL
 * @param {string} model - The model to use
 * @param {boolean} keep - Keep the model loaded in memory
 * @param {import('../users.js').UserDirectoryList} directories - The directories object for the user
 * @returns {Promise<number[]>} - The vector for the text
 */
export async function getOllamaVector(text, apiUrl, model, keep, directories) {
    const url = new URL(apiUrl);
    url.pathname = '/api/embeddings';

    const headers = {};
    setAdditionalHeadersByType(headers, TEXTGEN_TYPES.OLLAMA, apiUrl, directories);

    const response = await fetch(url, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            ...headers,
        },
        body: JSON.stringify({
            prompt: text,
            model: model,
            keep_alive: keep ? -1 : undefined,
            truncate: true,
        }),
    });

    if (!response.ok) {
        const responseText = await response.text();
        throw new Error(`Ollama: Failed to get vector for text: ${response.statusText} ${responseText}`);
    }

    /** @type {any} */
    const data = await response.json();

    if (!Array.isArray(data?.embedding)) {
        throw new Error('API response was not an array');
    }

    return data.embedding;
}
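
// Example usage (illustrative sketch, not part of the module; assumes an Ollama
// server at the given URL with the named embedding model already pulled):
//   const vector = await getOllamaVector('hello world', 'http://127.0.0.1:11434', 'nomic-embed-text', false, directories);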
Some files were not shown because too many files have changed in this diff.