import Handlebars from 'handlebars';
import SVGInject from '@iconfu/svg-inject';
import DOMPurify from 'dompurify';
import $ from 'jquery';
import hljs from 'highlight.js';
import openaipromptData from '../scripts/openaiprompt.json'
// Active streaming processor for the in-flight generation; null when nothing is streaming.
export let streamingProcessor = null;
import {
  collapseNewlines,
  // loadPowerUserSettings,
  // playMessageSound,
  fixMarkdown,
  power_user,
  persona_description_positions,
  // loadMovingUIState,
  // getCustomStoppingStrings,
  // MAX_CONTEXT_DEFAULT,
  // MAX_RESPONSE_DEFAULT,
  renderStoryString,
  // sortEntitiesList,
  // registerDebugFunction,
  // ui_mode,
  // switchSimpleMode,
  // flushEphemeralStoppingStrings,
  // context_presets,
  // resetMovableStyles,
  forceCharacterEditorTokenize,
} from '../scripts/power-user.js';
import {
  // world_info,
  getWorldInfoPrompt,
  // getWorldInfoSettings,
  // setWorldInfoSettings,
  // world_names,
  // importEmbeddedWorldInfo,
  // checkEmbeddedWorld,
  // setWorldInfoButtonClass,
  // importWorldInfo,
  wi_anchor_position,
} from '../scripts/world-info.js';

import showdown from 'showdown'
import { userStatsHandler, statMesProcess, initStats } from '../scripts/stats.js';
import { markdownExclusionExt } from '../scripts/showdown-exclusion.js';
import { markdownUnderscoreExt } from '../scripts/showdown-underscore.js';
import { 
  getCfgPrompt, 
  getGuidanceScale, 
  // initCfg 
} from '../scripts/cfg-scale.js';

import { 
  NOTE_MODULE_NAME, 
  // initAuthorsNote, 
  metadata_keys, 
  setFloatingPrompt, 
  shouldWIAddPrompt 
} from '../scripts/authors-note.js';

import { 
  // ModuleWorkerWrapper, 
  // doDailyExtensionUpdatesCheck, 
  extension_settings, 
  // getContext, 
  // loadExtensionSettings, 
  // renderExtensionTemplate, 
  // renderExtensionTemplateAsync, 
  runGenerationInterceptors, 
  // saveMetadataDebounced,
  // writeExtensionField 
} from '../scripts/extensions.js';
import { 
  // getFriendlyTokenizerName, 
  getTokenCount, 
  getTokenCountAsync, 
  getTokenizerModel, 
  // initTokenizers, 
  // saveTokenCache 
} from '../scripts/tokenizers.js';

import { 
  COMMENT_NAME_DEFAULT,
  // executeSlashCommands, 
  // executeSlashCommandsOnChatInput, 
  getSlashCommandsHelp, 
  // isExecutingCommandsFromChatInput, 
  // pauseScriptExecution, 
  // processChatSlashCommands, 
  // registerSlashCommand, 
  // stopScriptExecution 
} from '../scripts/slash-commands.js';

import { 
  // humanizedDateTime, 
  // favsToHotswap, 
  getMessageTimeStamp, 
  // dragElement, 
  // isMobile, 
  // initRossMods, 
  // shouldSendOnEnter 
} from '../scripts/RossAscends-mods.js';

import tempData from './chartacter.json'
import openAiSettingsData from '../scripts/1.json'
// DEBUG: trace the statically-imported OpenAI settings payload (consider removing before release).
console.log(`-----openAiSettingsData`,typeof openAiSettingsData)
console.log(`-----openAiSettingsData`, openAiSettingsData)
import {
  setOpenAIMessageExamples,
  setOpenAIMessages,
  setupChatCompletionPromptManager,
  prepareOpenAIMessages,
  sendOpenAIRequest,
  // loadOpenAISettings,
  oai_settings,
  openai_messages_count,
  chat_completion_sources,
  getChatCompletionModel,
  // isOpenRouterWithInstruct,
  // proxies,
  // loadProxyPresets,
  // selected_proxy,
} from '../scripts/openai.js';
// Bootstrap the Chat Completion prompt manager from a static JSON import
// (../scripts/1.json) instead of user-loaded settings.
// NOTE(review): confirm this fixture matches the shape loadOpenAISettings produces.
const openAiSettings = openAiSettingsData
setupChatCompletionPromptManager(openAiSettings)
import { evaluateMacros } from '../scripts/macros.js';
import {
  user_avatar,
  // getUserAvatars,
  getUserAvatar,
  // setUserAvatar,
  // initPersonas,
  // setPersonaDescription,
  // initUserAvatar,
} from '../scripts/personas.js';
import {
  appendFileContent,
  hasPendingFileAttachment,
  populateFileAttachment, 
   decodeStyleTags, 
   encodeStyleTags, 
  //  isExternalMediaAllowed, 
  //  getCurrentEntityId 
} from '../scripts/chats.js';

/**
 * @enum {string} System message types
 * Frozen so the enum cannot be mutated at runtime.
 */
export const system_message_types = Object.freeze({
  HELP: 'help',
  WELCOME: 'welcome',
  GROUP: 'group',
  EMPTY: 'empty',
  GENERIC: 'generic',
  BOOKMARK_CREATED: 'bookmark_created',
  BOOKMARK_BACK: 'bookmark_back',
  NARRATOR: 'narrator',
  COMMENT: 'comment',
  SLASH_COMMANDS: 'slash_commands',
  FORMATTING: 'formatting',
  HOTKEYS: 'hotkeys',
  MACROS: 'macros',
});
/**
 * @enum {number} Extension prompt types — where an extension prompt gets injected.
 * Frozen so the enum cannot be mutated at runtime.
 */
export const extension_prompt_types = Object.freeze({
  IN_PROMPT: 0,     // placed inside the main prompt body
  IN_CHAT: 1,       // injected into chat history at a given depth
  BEFORE_PROMPT: 2, // placed ahead of the main prompt
});

import { getRegexedString, regex_placement } from '../scripts/extensions/regex/engine.js';
import {
  kai_settings,
  kai_flags,
} from '../scripts/kai-settings.js';

import {
  groups,
  selected_group,
  // saveGroupChat,
  // getGroups,
  generateGroupWrapper,
  // deleteGroup,
  is_group_generating,
  // resetSelectedGroup,
  // select_group_chats,
  // regenerateGroup,
  // group_generation_id,
  // getGroupChat,
  // renameGroupMember,
  // createNewGroupChat,
  // getGroupPastChats,
  // getGroupAvatar,
  // openGroupChat,
  // editGroup,
  // deleteGroupChat,
  // renameGroupChat,
  // importGroupChat,
  // getGroupBlock,
  // getGroupCharacterCards,
  getGroupDepthPrompts,
} from '../scripts/group-chats.js';
import {
  debounce,
  // delay,
  // trimToEndSentence,
  // countOccurrences,
  // isOdd,
  // sortMoments,
  timestampToMoment,
  // download,
  // isDataURL,
  // getCharaFilename,
  // PAGINATION_TEMPLATE,
  // waitUntilCondition,
  // escapeRegex,
  // resetScrollHeight,
  // onlyUnique,
  // getBase64Async,
  // humanFileSize,
  // Stopwatch,
  // isValidUrl,
  // ensureImageFormatSupported,
  // flashHighlight,
  // checkOverwriteExistingData,
} from '../scripts/utils.js'
import { debounce_timeout } from '../scripts/constants.js';

import {
  // force_output_sequence,
  // formatInstructModeChat,
  // formatInstructModePrompt,
  formatInstructModeExamples,
  // getInstructStoppingSequences,
  // autoSelectInstructPreset,
  formatInstructModeSystemPrompt,
  // selectInstructPreset,
  // instruct_presets,
  // selectContextPreset,
} from '../scripts/instruct-mode.js';
import {
  // generateNovelWithStreaming,
  // getNovelGenerationData,
  getKayraMaxContextTokens,
  // getNovelTier,
  // loadNovelPreset,
  // loadNovelSettings,
  nai_settings,
  adjustNovelInstructionPrompt,
  // loadNovelSubscriptionData,
  // parseNovelAILogprobs,
} from '../scripts/nai-settings.js';

// Markdown converter
export let mesForShowdownParse; //intended to be used as a context to compare showdown strings against
// Shared showdown converter instance; (re)created by reloadMarkdownProcessor().
let converter;
// Safe to call before the declaration further down: function declarations are hoisted.
reloadMarkdownProcessor();

// array for prompt token calculations
console.debug('initializing Prompt Itemization Array on Startup');
// const promptStorage = new localforage.createInstance({ name: 'SillyTavern_Prompts' });
// Accumulates per-message prompt breakdowns for token itemization.
export let itemizedPrompts = [];

// ---- Core conversation state, pre-seeded from the bundled fixture JSON ----
export const systemUserName = 'SillyTavern System';
let default_user_name = 'User';
export let name1 = default_user_name;
export let name2 = 'SillyTavern System';
export let amount_gen = 80; //default max length of AI generated responses
export let max_context = 2048;
// Chat history and character roster come straight from chartacter.json fixtures.
export let chat = tempData.chatTemp;
export let main_api = 'openai';
export let characters = tempData.charactersTemp;
console.log("🚀 ~ characters:", characters)
// NOTE(review): active character index is hard-coded — confirm index 11 exists in the fixture.
export let this_chid = 11;
export let online_status = 'Connected'; // no_connection, Connected, offline
export let is_send_press = true; //Send generation
/**
 * Event names emitted on the global event bus (`eventSource`).
 * Listeners subscribe with these string ids.
 * @enum {string}
 */
export const event_types = {
  APP_READY: 'app_ready',
  EXTRAS_CONNECTED: 'extras_connected',
  MESSAGE_SWIPED: 'message_swiped',
  MESSAGE_SENT: 'message_sent',
  MESSAGE_RECEIVED: 'message_received',
  MESSAGE_EDITED: 'message_edited',
  MESSAGE_DELETED: 'message_deleted',
  MESSAGE_UPDATED: 'message_updated',
  IMPERSONATE_READY: 'impersonate_ready',
  CHAT_CHANGED: 'chat_id_changed',
  GENERATION_STARTED: 'generation_started',
  GENERATION_STOPPED: 'generation_stopped',
  GENERATION_ENDED: 'generation_ended',
  EXTENSIONS_FIRST_LOAD: 'extensions_first_load',
  SETTINGS_LOADED: 'settings_loaded',
  SETTINGS_UPDATED: 'settings_updated',
  GROUP_UPDATED: 'group_updated',
  MOVABLE_PANELS_RESET: 'movable_panels_reset',
  SETTINGS_LOADED_BEFORE: 'settings_loaded_before',
  SETTINGS_LOADED_AFTER: 'settings_loaded_after',
  CHATCOMPLETION_SOURCE_CHANGED: 'chatcompletion_source_changed',
  CHATCOMPLETION_MODEL_CHANGED: 'chatcompletion_model_changed',
  OAI_PRESET_CHANGED_BEFORE: 'oai_preset_changed_before',
  OAI_PRESET_CHANGED_AFTER: 'oai_preset_changed_after',
  WORLDINFO_SETTINGS_UPDATED: 'worldinfo_settings_updated',
  WORLDINFO_UPDATED: 'worldinfo_updated',
  CHARACTER_EDITED: 'character_edited',
  CHARACTER_PAGE_LOADED: 'character_page_loaded',
  CHARACTER_GROUP_OVERLAY_STATE_CHANGE_BEFORE: 'character_group_overlay_state_change_before',
  CHARACTER_GROUP_OVERLAY_STATE_CHANGE_AFTER: 'character_group_overlay_state_change_after',
  USER_MESSAGE_RENDERED: 'user_message_rendered',
  CHARACTER_MESSAGE_RENDERED: 'character_message_rendered',
  FORCE_SET_BACKGROUND: 'force_set_background',
  CHAT_DELETED: 'chat_deleted',
  GROUP_CHAT_DELETED: 'group_chat_deleted',
  GENERATE_BEFORE_COMBINE_PROMPTS: 'generate_before_combine_prompts',
  GENERATE_AFTER_COMBINE_PROMPTS: 'generate_after_combine_prompts',
  GROUP_MEMBER_DRAFTED: 'group_member_drafted',
  WORLD_INFO_ACTIVATED: 'world_info_activated',
  TEXT_COMPLETION_SETTINGS_READY: 'text_completion_settings_ready',
  CHAT_COMPLETION_SETTINGS_READY: 'chat_completion_settings_ready',
  CHAT_COMPLETION_PROMPT_READY: 'chat_completion_prompt_ready',
  CHARACTER_FIRST_MESSAGE_SELECTED: 'character_first_message_selected',
  // TODO: Naming convention is inconsistent with other events
  CHARACTER_DELETED: 'characterDeleted',
  CHARACTER_DUPLICATED: 'character_duplicated',
  SMOOTH_STREAM_TOKEN_RECEIVED: 'smooth_stream_token_received',
  FILE_ATTACHMENT_DELETED: 'file_attachment_deleted',
  WORLDINFO_FORCE_ACTIVATE: 'worldinfo_force_activate',
  OPEN_CHARACTER_LIBRARY: 'open_character_library',
  LLM_FUNCTION_TOOL_REGISTER: 'llm_function_tool_register',
  LLM_FUNCTION_TOOL_CALL: 'llm_function_tool_call',
};
// Global event bus provided by the host page.
export const eventSource = $globalEventBus;
// Cache of the prompt text built on the previous generation pass.
let generatedPromptCache = '';
// Whether the swipe UI is enabled.
let swipes = true;
// Registered extension prompts, keyed by injection tag (see setExtensionPrompt).
let extension_prompts = {};
// Moment the current generation run started; used for generation timing.
let generation_started = new Date();
/** @type {AbortController|null} Controller for cancelling an in-flight generation; created lazily in Generate(). */
let abortController = null;
/** @type {number} The debounce interval used for chat/settings saves (debounce_timeout.relaxed). */
const durationSaveEdit = debounce_timeout.relaxed;
export const saveSettingsDebounced = debounce(() => saveSettings(), durationSaveEdit);
// Expose a minimal scripting context on `window` for extensions.
// (The remaining upstream context fields are disabled in this trimmed build.)
window['SillyTavern'] = {
  /**
   * Returns a snapshot of the current app state for extension scripts.
   * @returns {object} Context object with chat state and prompt helpers.
   */
  getContext() {
    const chatId = selected_group
      ? groups.find((g) => g.id == selected_group)?.chat_id
      : (this_chid && characters[this_chid] && characters[this_chid].chat);
    return {
      chat,
      characters,
      groups,
      name1,
      name2,
      characterId: this_chid,
      groupId: selected_group,
      chatId,
      getTokenCount,
      extensionPrompts: extension_prompts,
      setExtensionPrompt,
      getTokenizerModel,
    };
  },
};
/**
 * @enum {number} Extension prompt roles
 * Frozen so the enum cannot be mutated at runtime.
 */
export const extension_prompt_roles = Object.freeze({
  SYSTEM: 0,
  USER: 1,
  ASSISTANT: 2,
});

// Hard ceiling on how deep an extension prompt may be injected into chat.
export const MAX_INJECTION_DEPTH = 1000;

// Populated by getSystemMessages(); maps system_message_types values to message objects.
export let system_messages = {};

// CSRF token echoed back in getRequestHeaders().
// NOTE(review): assigned somewhere outside this view — confirm the init path.
export let token;

/** The tag of the active character. (NOT the id) */
export let active_character = '';
/** The tag of the active group. (Coincidentally also the id) */
export let active_group = '';

// export const entitiesFilter = new FilterHelper(printCharactersDebounced);
/**
 * Builds the common headers sent with backend API requests.
 * @returns {{'Content-Type': string, 'X-CSRF-Token': string}} JSON content type plus the current CSRF token.
 */
export function getRequestHeaders() {
  const headers = {};
  headers['Content-Type'] = 'application/json';
  headers['X-CSRF-Token'] = token;
  return headers;
}

// Saved here for performance reasons
// Cached jQuery lookups for the message template and the chat container.
const messageTemplate = $('#message_template .mes');
const chatElement = $('#chat');

// Arbitrary per-chat metadata (e.g. the 'tainted' flag and author's-note keys read in Generate).
export let chat_metadata = {};

/**
 * Builds the `system_messages` lookup table used to display built-in
 * system chat messages (help, welcome, bookmarks, etc.).
 * Template-backed entries are rendered via renderTemplateAsync, awaited
 * one after another in declaration order.
 */
async function getSystemMessages() {
  // Fields shared by every system message.
  const base = {
    name: systemUserName,
    force_avatar: system_avatar,
    is_user: false,
    is_system: true,
  };

  system_messages = {
    help: { ...base, mes: await renderTemplateAsync('help') },
    slash_commands: { ...base, mes: '' },
    hotkeys: { ...base, mes: await renderTemplateAsync('hotkeys') },
    formatting: { ...base, mes: await renderTemplateAsync('formatting') },
    macros: { ...base, mes: await renderTemplateAsync('macros') },
    welcome: { ...base, mes: await renderTemplateAsync('welcome', { displayVersion }) },
    group: {
      ...base,
      is_group: true,
      mes: 'Group chat created. Say \'Hi\' to lovely people!',
    },
    empty: {
      ...base,
      mes: 'No one hears you. <b>Hint&#58;</b> add more members to the group!',
    },
    generic: {
      ...base,
      mes: 'Generic system message. User `text` parameter to override the contents',
    },
    bookmark_created: {
      ...base,
      mes: 'Checkpoint created! Click here to open the checkpoint chat: <a class="bookmark_link" file_name="{0}" href="javascript:void(null);">{1}</a>',
    },
    bookmark_back: {
      ...base,
      mes: 'Click here to return to the previous chat: <a class="bookmark_link" file_name="{0}" href="javascript:void(null);">Return</a>',
    },
  };
}

/**
 * (Re)creates the module-level showdown `converter`.
 * @param {boolean} [render_formulas=false] When true, enable KaTeX math rendering
 *   instead of the literal-underscore extension.
 * @returns {showdown.Converter} The newly created converter.
 */
export function reloadMarkdownProcessor(render_formulas = false) {
  // Options shared by both converter variants, kept in one place so they can't drift.
  const baseOptions = {
    emoji: true,
    underline: true,
    tables: true,
    parseImgDimensions: true,
    simpleLineBreaks: true,
    strikethrough: true,
    disableForced4SpacesIndentedSublists: true,
  };

  if (render_formulas) {
    // NOTE(review): showdownKatex has no visible import in this file; this
    // branch throws a ReferenceError unless it is provided globally — confirm.
    converter = new showdown.Converter({
      ...baseOptions,
      extensions: [
        showdownKatex({
          delimiters: [
            { left: '$$', right: '$$', display: true, asciimath: false },
            { left: '$', right: '$', display: false, asciimath: true },
          ],
        }),
      ],
    });
  }
  else {
    converter = new showdown.Converter({
      ...baseOptions,
      literalMidWordUnderscores: true,
      extensions: [markdownUnderscoreExt()],
    });
  }

  // Inject the dinkus extension after creating the converter
  // Maybe move this into power_user init?
  setTimeout(() => {
    if (power_user) {
      converter.addExtension(markdownExclusionExt(), 'exclusion');
    }
  }, 1);

  return converter;
}
/**
 * Injects the user's persona description into the prompt according to
 * power_user.persona_description_position:
 *  - TOP_AN / BOTTOM_AN: prepends/appends it to the Author's Note extension prompt
 *  - AT_DEPTH: registers it as a standalone in-chat injection
 * Always resets the PERSONA_DESCRIPTION tag first so stale values never leak.
 *
 * NOTE(review): NOTE_MODULE_NAME, metadata_keys and shouldWIAddPrompt are
 * commented out of the '../scripts/authors-note.js' import at the top of this
 * file; the Author's Note branch throws a ReferenceError until they are re-enabled.
 */
function addPersonaDescriptionExtensionPrompt() {
  const INJECT_TAG = 'PERSONA_DESCRIPTION';
  // Clear any previous injection before deciding where (or whether) to add it.
  setExtensionPrompt(INJECT_TAG, '', extension_prompt_types.IN_PROMPT, 0);

  if (!power_user.persona_description) {
      return;
  }

  const promptPositions = [persona_description_positions.BOTTOM_AN, persona_description_positions.TOP_AN];

  if (promptPositions.includes(power_user.persona_description_position) && shouldWIAddPrompt) {
      // Splice the persona description above/below the existing Author's Note value.
      const originalAN = extension_prompts[NOTE_MODULE_NAME].value;
      const ANWithDesc = power_user.persona_description_position === persona_description_positions.TOP_AN
          ? `${power_user.persona_description}\n${originalAN}`
          : `${originalAN}\n${power_user.persona_description}`;

      setExtensionPrompt(NOTE_MODULE_NAME, ANWithDesc, chat_metadata[metadata_keys.position], chat_metadata[metadata_keys.depth], extension_settings.note.allowWIScan, chat_metadata[metadata_keys.role]);
  }

  if (power_user.persona_description_position === persona_description_positions.AT_DEPTH) {
      setExtensionPrompt(INJECT_TAG, power_user.persona_description, extension_prompt_types.IN_CHAT, power_user.persona_description_depth, true, power_user.persona_description_role);
  }
}
/**
 * Returns the id of the currently open chat.
 * Group chats store it on the group record; solo chats on the character.
 * @returns {string|undefined} Chat id, or undefined when nothing is selected.
 */
export function getCurrentChatId() {
  if (selected_group) {
      return groups.find((g) => g.id == selected_group)?.chat_id;
  }
  if (this_chid !== undefined) {
      return characters[this_chid]?.chat;
  }
}

export async function Generate(type, text ,{ automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops, quietName } = {}, dryRun = false) {
  debugger;
  console.log("🚀 ~ Generate ~ type:", type)
  console.log('Generate entered');
  console.log(`-------power_user`, power_user)
  $globalEventBus.emit(event_types.GENERATION_STARTED, type, { automatic_trigger, force_name2, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, maxLoops }, dryRun);
  // setGenerationProgress(0);
  generation_started = new Date();

  // Don't recreate abort controller if signal is passed
  if (!(abortController && signal)) {
    abortController = new AbortController();
  }

  // OpenAI doesn't need instruct mode. Use OAI main prompt instead.
  const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
  const isImpersonate = type == 'impersonate';

  let message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;

  if (!(dryRun || type == 'regenerate' || type == 'swipe' || type == 'quiet')) {
    // TODO : 这里是 / 输入状态 
    // const interruptedByCommand = await processCommands(String($('#send_textarea').val()));

    // if (interruptedByCommand) {
    //     //$("#send_textarea").val('')[0].dispatchEvent(new Event('input', { bubbles:true }));
    //     unblockGeneration(type);
    //     return Promise.resolve();
    // }
  }

  if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
    toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
    unblockGeneration(type);
    return Promise.resolve();
  }

  if (main_api === 'textgenerationwebui' &&
    textgen_settings.streaming &&
    textgen_settings.legacy_api &&
    (textgen_settings.type === OOBA || textgen_settings.type === APHRODITE)) {
    toastr.error('Streaming is not supported for the Legacy API. Update Ooba and use new API to enable streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
    unblockGeneration(type);
    return Promise.resolve();
  }

  if (isHordeGenerationNotAllowed()) {
    unblockGeneration(type);
    return Promise.resolve();
  }

  if (!dryRun) {
    // Ping server to make sure it is still alive
    const pingResult = await pingServer();

    if (!pingResult) {
      unblockGeneration(type);
      toastr.error('Verify that the server is running and accessible.', 'ST Server cannot be reached');
      throw new Error('Server unreachable');
    }

    // Hide swipes if not in a dry run.
    hideSwipeButtons();
    // If generated any message, set the flag to indicate it can't be recreated again.
    chat_metadata['tainted'] = true;
  }

  if (selected_group && !is_group_generating) {
    if (!dryRun) {
      // Returns the promise that generateGroupWrapper returns; resolves when generation is done
      return generateGroupWrapper(false, type, { quiet_prompt, force_chid, signal: abortController.signal, quietImage, maxLoops });
    }

    const characterIndexMap = new Map(characters.map((char, index) => [char.avatar, index]));
    const group = groups.find((x) => x.id === selected_group);

    const enabledMembers = group.members.reduce((acc, member) => {
      if (!group.disabled_members.includes(member) && !acc.includes(member)) {
        acc.push(member);
      }
      return acc;
    }, []);

    const memberIds = enabledMembers
      .map((member) => characterIndexMap.get(member))
      .filter((index) => index !== undefined && index !== null);

    if (memberIds.length > 0) {
      setCharacterId(memberIds[0]);
      setCharacterName('');
    } else {
      console.log('No enabled members found');
      unblockGeneration(type);
      return Promise.resolve();
    }
  }

  //#########QUIET PROMPT STUFF##############
  //this function just gives special care to novel quiet instruction prompts
  if (quiet_prompt) {
    quiet_prompt = substituteParams(quiet_prompt);
    quiet_prompt = main_api == 'novel' && !quietToLoud ? adjustNovelInstructionPrompt(quiet_prompt) : quiet_prompt;
  }

  const isChatValid = online_status !== 'no_connection' && this_chid !== undefined;

  // We can't do anything because we're not in a chat right now. (Unless it's a dry run, in which case we need to
  // assemble the prompt so we can count its tokens regardless of whether a chat is active.)
  if (!dryRun && !isChatValid) {
    if (this_chid === undefined) {
      Toast.show({
        icon: 'fail',
        content: 'haracter is not selected',
        duration: 5000,
      })
      // toastr.warning('Сharacter is not selected');
    }
    is_send_press = false;
    return Promise.resolve();
  }

  let textareaText;
  if (type !== 'regenerate' && type !== 'swipe' && type !== 'quiet' && !isImpersonate && !dryRun) {
    is_send_press = true;
    // TODO : 这里改造一下，获取用户输入的文本
    // textareaText = String($('#send_textarea').val());
    // $('#send_textarea').val('')[0].dispatchEvent(new Event('input', { bubbles: true }));
    textareaText = '你好';
  } else {
    textareaText = '';
    if (chat.length && chat[chat.length - 1]['is_user']) {
      //do nothing? why does this check exist?
    }
    else if (type !== 'quiet' && type !== 'swipe' && !isImpersonate && !dryRun && chat.length) {
      chat.length = chat.length - 1;
      $('#chat').children().last().hide(250, function () {
        $(this).remove();
      });
      await eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
    }
  }
  const isContinue = type == 'continue';

  // Rewrite the generation timer to account for the time passed for all the continuations.
  if (isContinue && chat.length) {
    const prevFinished = chat[chat.length - 1]['gen_finished'];
    const prevStarted = chat[chat.length - 1]['gen_started'];

    if (prevFinished && prevStarted) {
      const timePassed = prevFinished - prevStarted;
      generation_started = new Date(Date.now() - timePassed);
      chat[chat.length - 1]['gen_started'] = generation_started;
    }
  }

  if (!dryRun) {
    // 这里要对按钮做操作
    // deactivateSendButtons();
  }

  let { messageBias, promptBias, isUserPromptBias } = getBiasStrings(textareaText, type);

  //*********************************
  //PRE FORMATING STRING    预格式化字符串
  //*********************************
  // TODO: 对于用户发送的正常消息。。
  //for normal messages sent from user..
  if ((textareaText != '' || hasPendingFileAttachment()) && !automatic_trigger && type !== 'quiet' && !dryRun) {
    // If user message contains no text other than bias - send as a system message
    if (messageBias && !removeMacros(textareaText)) {
      sendSystemMessage(system_message_types.GENERIC, ' ', { bias: messageBias });
    }
    else {
      await sendMessageAsUser(textareaText, messageBias);
    }
  }
  else if (textareaText == '' && !automatic_trigger && !dryRun && type === undefined && main_api == 'openai' && oai_settings.send_if_empty.trim().length > 0) {
    // Use send_if_empty if set and the user message is empty. Only when sending messages normally
    await sendMessageAsUser(oai_settings.send_if_empty.trim(), messageBias);
  }

  let {
    description,
    personality,
    persona,
    scenario,
    mesExamples,
    system,
    jailbreak,
  } = getCharacterCardFields();

  if (isInstruct) {
    system = power_user.prefer_character_prompt && system ? system : baseChatReplace(power_user.instruct.system_prompt, name1, name2);
    system = formatInstructModeSystemPrompt(substituteParams(system, name1, name2, power_user.instruct.system_prompt));
  }

  // Depth prompt (character-specific A/N)
  removeDepthPrompts();
  const groupDepthPrompts = getGroupDepthPrompts(selected_group, Number(this_chid));

  if (selected_group && Array.isArray(groupDepthPrompts) && groupDepthPrompts.length > 0) {
    groupDepthPrompts.forEach((value, index) => {
      const role = getExtensionPromptRoleByName(value.role);
      setExtensionPrompt('DEPTH_PROMPT_' + index, value.text, extension_prompt_types.IN_CHAT, value.depth, extension_settings.note.allowWIScan, role);
    });
  } else {
    const depthPromptText = baseChatReplace(characters[this_chid].data?.extensions?.depth_prompt?.prompt?.trim(), name1, name2) || '';
    const depthPromptDepth = characters[this_chid].data?.extensions?.depth_prompt?.depth ?? depth_prompt_depth_default;
    const depthPromptRole = getExtensionPromptRoleByName(characters[this_chid].data?.extensions?.depth_prompt?.role ?? depth_prompt_role_default);
    setExtensionPrompt('DEPTH_PROMPT', depthPromptText, extension_prompt_types.IN_CHAT, depthPromptDepth, extension_settings.note.allowWIScan, depthPromptRole);
  }

  // First message in fresh 1-on-1 chat reacts to user/character settings changes
  if (chat.length) {
    chat[0].mes = substituteParams(chat[0].mes);
  }

  // Collect messages with usable content
  let coreChat = chat.filter(x => !x.is_system);
  if (type === 'swipe') {
    coreChat.pop();
  }

  coreChat = await Promise.all(coreChat.map(async (chatItem, index) => {
    let message = chatItem.mes;
    let regexType = chatItem.is_user ? regex_placement.USER_INPUT : regex_placement.AI_OUTPUT;
    let options = { isPrompt: true, depth: (coreChat.length - index - 1) };

    let regexedMessage = getRegexedString(message, regexType, options);
    regexedMessage = await appendFileContent(chatItem, regexedMessage);

    return {
      ...chatItem,
      mes: regexedMessage,
      index,
    };
  }));

  // Determine token limit
  let this_max_context = getMaxContextSize();

  if (!dryRun && type !== 'quiet') {
    console.debug('Running extension interceptors');
    const aborted = await runGenerationInterceptors(coreChat, this_max_context);

    if (aborted) {
      console.debug('Generation aborted by extension interceptors');
      unblockGeneration(type);
      return Promise.resolve();
    }
  } else {
    console.debug('Skipping extension interceptors for dry run');
  }
  console.log('00001')
  //   // Adjust token limit for Horde
    let adjustedParams;
    // if (main_api == 'koboldhorde' && (horde_settings.auto_adjust_context_length || horde_settings.auto_adjust_response_length)) {
    //     try {
    //         adjustedParams = await adjustHordeGenerationParams(max_context, amount_gen);
    //     }
    //     catch {
    //         unblockGeneration(type);
    //         return Promise.resolve();
    //     }
    //     if (horde_settings.auto_adjust_context_length) {
    //         this_max_context = (adjustedParams.maxContextLength - adjustedParams.maxLength);
    //     }
    // }

    console.log(`Core/all messages: ${coreChat.length}/${chat.length}`);

    // kingbri MARK: - Make sure the prompt bias isn't the same as the user bias
    if ((promptBias && !isUserPromptBias) || power_user.always_force_name2 || main_api == 'novel') {
        force_name2 = true;
    }

    if (isImpersonate) {
        force_name2 = false;
    }

    // TODO (kingbri): Migrate to a utility function
    /**
     * Parses an examples string.
     * @param {string} examplesStr
     * @returns {string[]} Examples array with block heading
     */
    function parseMesExamples(examplesStr) {
        if (examplesStr.length === 0) {
            return [];
        }

        if (!examplesStr.startsWith('<START>')) {
            examplesStr = '<START>\n' + examplesStr.trim();
        }

        const exampleSeparator = power_user.context.example_separator ? `${substituteParams(power_user.context.example_separator)}\n` : '';
        const blockHeading = main_api === 'openai' ? '<START>\n' : (exampleSeparator || (isInstruct ? '<START>\n' : ''));
        const splitExamples = examplesStr.split(/<START>/gi).slice(1).map(block => `${blockHeading}${block.trim()}\n`);

        return splitExamples;
    }

    // Parse the character's example dialogues into individual blocks.
    let mesExamplesArray = parseMesExamples(mesExamples);

    //////////////////////////////////
    // Extension added strings
    // Set non-WI AN
    setFloatingPrompt();
    // Add persona description to prompt
    addPersonaDescriptionExtensionPrompt();

    // Add WI to prompt (and also inject WI to AN value via hijack)
    // Make quiet prompt available for WIAN
    setExtensionPrompt('QUIET_PROMPT', quiet_prompt || '', extension_prompt_types.IN_PROMPT, 0, true);
    // World Info scanning reads the chat newest-first.
    const chatForWI = coreChat.map(x => `${x.name}: ${x.mes}`).reverse();
    const { worldInfoString, worldInfoBefore, worldInfoAfter, worldInfoExamples, worldInfoDepth } = await getWorldInfoPrompt(chatForWI, this_max_context, dryRun);
    // Clear the temporary quiet-prompt injection now that WI has been built.
    setExtensionPrompt('QUIET_PROMPT', '', extension_prompt_types.IN_PROMPT, 0, true);

    // Add message example WI
    for (const example of worldInfoExamples) {
        const exampleMessage = example.content;

        if (exampleMessage.length === 0) {
            continue;
        }

        const formattedExample = baseChatReplace(exampleMessage, name1, name2);
        const cleanedExample = parseMesExamples(formattedExample);

        // Insert depending on before or after position
        if (example.position === wi_anchor_position.before) {
            mesExamplesArray.unshift(...cleanedExample);
        } else {
            mesExamplesArray.push(...cleanedExample);
        }
    }

    // At this point, the raw message examples can be created
    const mesExamplesRawArray = [...mesExamplesArray];

    if (mesExamplesArray && isInstruct) {
        mesExamplesArray = formatInstructModeExamples(mesExamplesArray, name1, name2);
    }

    if (skipWIAN !== true) {
        console.log('skipWIAN not active, adding WIAN');
        // Add all depth WI entries to prompt
        flushWIDepthInjections();
        if (Array.isArray(worldInfoDepth)) {
            worldInfoDepth.forEach((e) => {
                const joinedEntries = e.entries.join('\n');
                setExtensionPrompt(`customDepthWI-${e.depth}-${e.role}`, joinedEntries, extension_prompt_types.IN_CHAT, e.depth, false, e.role);
            });
        }
    } else {
        console.log('skipping WIAN');
    }
    // Inject all Depth prompts. Chat Completion does it separately
    let injectedIndices = [];
    if (main_api !== 'openai') {
        injectedIndices = doChatInject(coreChat, isContinue);
    }

    // Insert character jailbreak as the last user message (if exists, allowed, preferred, and not using Chat Completion)
    if (power_user.context.allow_jailbreak && power_user.prefer_character_jailbreak && main_api !== 'openai' && jailbreak) {
        // Set "original" explicity to empty string since there's no original
        jailbreak = substituteParams(jailbreak, name1, name2, '');

        // When continuing generation of previous output, last user message precedes the message to continue
        if (isContinue) {
            coreChat.splice(coreChat.length - 1, 0, { mes: jailbreak, is_user: true });
        }
        else {
            coreChat.push({ mes: jailbreak, is_user: true });
        }
    }

    // chat2 is built newest-first: index 0 holds the latest message.
    let chat2 = [];
    let continue_mag = '';
    const userMessageIndices = [];
    // TODO(review): leftover debug logging — consider removing before release.
    console.log("🚀 ~ Generate ~ coreChat:", coreChat)
    for (let i = coreChat.length - 1, j = 0; i >= 0; i--, j++) {
        if (main_api == 'openai') {
            chat2[i] = coreChat[j].mes;
            if (i === 0 && isContinue) {
                // NOTE(review): chat2[i] was just set to the raw message, so this slice
                // is effectively a no-op — presumably kept for parity with the path below; confirm.
                chat2[i] = chat2[i].slice(0, chat2[i].lastIndexOf(coreChat[j].mes) + coreChat[j].mes.length);
                continue_mag = coreChat[j].mes;
            }
            continue;
        }

        chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, false);

        if (j === 0 && isInstruct) {
            // Reformat with the first output sequence (if any)
            chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, force_output_sequence.FIRST);
        }

        // Do not suffix the message for continuation
        if (i === 0 && isContinue) {
            if (isInstruct) {
                // Reformat with the last output sequence (if any)
                chat2[i] = formatMessageHistoryItem(coreChat[j], isInstruct, force_output_sequence.LAST);
            }

            // Trim the formatted item so it ends exactly at the raw message text.
            chat2[i] = chat2[i].slice(0, chat2[i].lastIndexOf(coreChat[j].mes) + coreChat[j].mes.length);
            continue_mag = coreChat[j].mes;
        }

        if (coreChat[j].is_user) {
            userMessageIndices.push(i);
        }
    }

    let addUserAlignment = isInstruct && power_user.instruct.user_alignment_message;
    let userAlignmentMessage = '';

    if (addUserAlignment) {
        const alignmentMessage = {
            name: name1,
            mes: power_user.instruct.user_alignment_message,
            is_user: true,
        };
        userAlignmentMessage = formatMessageHistoryItem(alignmentMessage, isInstruct, false);
    }

    // Call combined AN into Generate
    const beforeScenarioAnchor = getExtensionPrompt(extension_prompt_types.BEFORE_PROMPT).trimStart();
    const afterScenarioAnchor = getExtensionPrompt(extension_prompt_types.IN_PROMPT);

    // Parameters consumed by the story string template.
    const storyStringParams = {
        description: description,
        personality: personality,
        persona: persona,
        scenario: scenario,
        system: isInstruct ? system : '',
        char: name2,
        user: name1,
        wiBefore: worldInfoBefore,
        wiAfter: worldInfoAfter,
        loreBefore: worldInfoBefore,
        loreAfter: worldInfoAfter,
        mesExamples: mesExamplesArray.join(''),
        mesExamplesRaw: mesExamplesRawArray.join(''),
    };
    // TODO(review): leftover debug logging — consider removing before release.
    console.log("🚀 ~ Generate ~ storyStringParams:", storyStringParams)
    const storyString = renderStoryString(storyStringParams);
    // Story string rendered, safe to remove
    if (power_user.strip_examples) {
        mesExamplesArray = [];
    }

    let oaiMessages = [];
    let oaiMessageExamples = [];

    if (main_api === 'openai') {
        message_already_generated = '';
        oaiMessages = setOpenAIMessages(coreChat);
        oaiMessageExamples = setOpenAIMessageExamples(mesExamplesArray);
    }

    // hack for regeneration of the first message
    if (chat2.length == 0) {
        chat2.push('');
    }

    let examplesString = '';
    let chatString = '';
    let cyclePrompt = '';

    /**
     * Estimates the token footprint of all currently prepared prompt pieces
     * (anchors, story string, examples, chat, quiet/cycle prompts).
     * Reads the surrounding closure state at call time.
     * @returns {Promise<number>} Token count including the configured padding
     */
    async function getMessagesTokenCount() {
        const parts = [
            beforeScenarioAnchor,
            storyString,
            afterScenarioAnchor,
            examplesString,
            chatString,
            quiet_prompt,
            cyclePrompt,
            userAlignmentMessage,
        ];
        // Carriage returns are stripped so they never count as tokens.
        const encodeString = parts.join('').replace(/\r/gm, '');
        return getTokenCountAsync(encodeString, power_user.token_padding);
    }

    // Force pinned examples into the context
    let pinExmString;
    if (power_user.pin_examples) {
        pinExmString = examplesString = mesExamplesArray.join('');
    }

    // Only add the chat in context if past the greeting message
    if (isContinue && (chat2.length > 1 || main_api === 'openai')) {
        // The newest message (index 0) becomes the continuation prefix.
        cyclePrompt = chat2.shift();
    }

    // Collect enough messages to fill the context
    // arrMes is sparse: slots are filled only for messages admitted by the token budget.
    let arrMes = new Array(chat2.length);
    let tokenCount = await getMessagesTokenCount();
    let lastAddedIndex = -1;

    // TODO(review): leftover debug logging — consider removing before release.
    console.log(`--------4444`)
    // Pre-allocate all injections first.
    // If it doesn't fit - user shot himself in the foot
    for (const index of injectedIndices) {
        const item = chat2[index];

        if (typeof item !== 'string') {
            continue;
        }

        tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
        chatString = item + chatString;
        if (tokenCount < this_max_context) {
            arrMes[index] = item;
            lastAddedIndex = Math.max(lastAddedIndex, index);
        } else {
            break;
        }
    }
    // Fill the remaining token budget with ordinary history messages.
    for (let i = 0; i < chat2.length; i++) {
        // not needed for OAI prompting
        if (main_api == 'openai') {
            break;
        }

        // Skip already injected messages
        if (arrMes[i] !== undefined) {
            continue;
        }

        const item = chat2[i];

        if (typeof item !== 'string') {
            continue;
        }

        tokenCount += await getTokenCountAsync(item.replace(/\r/gm, ''));
        chatString = item + chatString;
        if (tokenCount < this_max_context) {
            arrMes[i] = item;
            lastAddedIndex = Math.max(lastAddedIndex, i);
        } else {
            break;
        }
    }

    // Add user alignment message if last message is not a user message
    const stoppedAtUser = userMessageIndices.includes(lastAddedIndex);
    if (addUserAlignment && !stoppedAtUser) {
        tokenCount += await getTokenCountAsync(userAlignmentMessage.replace(/\r/gm, ''));
        chatString = userAlignmentMessage + chatString;
        arrMes.push(userAlignmentMessage);
        injectedIndices.push(arrMes.length - 1);
    }

    // Unsparse the array. Adjust injected indices
    const newArrMes = [];
    const newInjectedIndices = [];
    for (let i = 0; i < arrMes.length; i++) {
        if (arrMes[i] !== undefined) {
            newArrMes.push(arrMes[i]);
            if (injectedIndices.includes(i)) {
                newInjectedIndices.push(newArrMes.length - 1);
            }
        }
    }

    arrMes = newArrMes;
    injectedIndices = newInjectedIndices;

    if (main_api !== 'openai') {
        // Report how many genuine chat messages (excluding injections) made it in.
        setInContextMessages(arrMes.length - injectedIndices.length, type);
    }

    // Estimate how many unpinned example messages fit in the context
    tokenCount = await getMessagesTokenCount();
    let count_exm_add = 0;
    if (!power_user.pin_examples) {
        for (let example of mesExamplesArray) {
            tokenCount += await getTokenCountAsync(example.replace(/\r/gm, ''));
            examplesString += example;
            if (tokenCount < this_max_context) {
                count_exm_add++;
            } else {
                break;
            }
        }
    }

    let mesSend = [];
    console.debug('calling runGenerate');

    if (isContinue) {
        // Coping mechanism for OAI spacing
        const isForceInstruct = isOpenRouterWithInstruct();
        if (main_api === 'openai' && !isForceInstruct && !cyclePrompt.endsWith(' ')) {
            cyclePrompt += oai_settings.continue_postfix;
            continue_mag += oai_settings.continue_postfix;
        }
        message_already_generated = continue_mag;
    }

    const originalType = type;

    if (!dryRun) {
        is_send_press = true;
    }

    generatedPromptCache += cyclePrompt;
    if (generatedPromptCache.length == 0 || type === 'continue') {
        console.debug('generating prompt');
        chatString = '';
        // Restore chronological order (oldest first) for prompt assembly.
        arrMes = arrMes.reverse();
        arrMes.forEach(function (item, i, arr) {
            // OAI doesn't need all of this
            if (main_api === 'openai') {
                return;
            }

            // Cohee: This removes a newline from the end of the last message in the context
            // Last prompt line will add a newline if it's not a continuation
            // In instruct mode it only removes it if wrap is enabled and it's not a quiet generation
            if (i === arrMes.length - 1 && type !== 'continue') {
                if (!isInstruct || (power_user.instruct.wrap && type !== 'quiet')) {
                    item = item.replace(/\n?$/, '');
                }
            }

            mesSend[mesSend.length] = { message: item, extensionPrompts: [] };
        });
    }
    let mesExmString = '';

    /**
     * Rebuilds the example-message string and reshapes the final chat line
     * via modifyLastPromptLine. No-op for Chat Completion, which assembles
     * its own prompt elsewhere.
     */
    function setPromptString() {
        if (main_api == 'openai') {
            return;
        }

        console.debug('--setting Prompt string');
        mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');

        const lastIndex = mesSend.length - 1;
        if (lastIndex >= 0) {
            mesSend[lastIndex].message = modifyLastPromptLine(mesSend[lastIndex].message);
        }
    }

    /**
     * Appends trailing scaffolding to the final prompt line depending on mode:
     * the quiet prompt, the instruct prompt line, the non-instruct impersonation
     * line, and/or the forced character-name prefix.
     * @param {string} lastMesString Prompt text of the last chat line so far
     * @returns {string} The modified last prompt line
     */
    function modifyLastPromptLine(lastMesString) {
        //#########QUIET PROMPT STUFF PT2##############

        // Add quiet generation prompt at depth 0
        if (quiet_prompt && quiet_prompt.length) {

            // here name1 is forced for all quiet prompts..why?
            const name = name1;
            //checks if we are in instruct, if so, formats the chat as such, otherwise just adds the quiet prompt
            const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true, '', name1, name2, false) : `\n${quiet_prompt}`;

            //This begins to fix quietPrompts (particularly /sysgen) for instruct
            //previously instruct input sequence was being appended to the last chat message w/o '\n'
            //and no output sequence was added after the input's content.
            //TODO: respect output_sequence vs last_output_sequence settings
            //TODO: decide how to prompt this to clarify who is talking 'Narrator', 'System', etc.
            if (isInstruct) {
                lastMesString += quietAppend; // + power_user.instruct.output_sequence + '\n';
            } else {
                lastMesString += quietAppend;
            }


            // Ross: bailing out early prevents quiet prompts from respecting other instruct prompt toggles
            // for sysgen, SD, and summary this is desireable as it prevents the AI from responding as char..
            // but for idle prompting, we want the flexibility of the other prompt toggles, and to respect them as per settings in the extension
            // need a detection for what the quiet prompt is being asked for...

            // Bail out early?
            if (!isInstruct && !quietToLoud) {
                return lastMesString;
            }
        }


        // Get instruct mode line
        if (isInstruct && !isContinue) {
            const name = (quiet_prompt && !quietToLoud) ? (quietName ?? 'System') : (isImpersonate ? name1 : name2);
            const isQuiet = quiet_prompt && type == 'quiet';
            lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2, isQuiet, quietToLoud);
        }

        // Get non-instruct impersonation line
        if (!isInstruct && isImpersonate && !isContinue) {
            const name = name1;
            if (!lastMesString.endsWith('\n')) {
                lastMesString += '\n';
            }
            lastMesString += name + ':';
        }

        // Add character's name
        // Force name append on continue (if not continuing on user message or first message)
        const isContinuingOnFirstMessage = chat.length === 1 && isContinue;
        if (!isInstruct && force_name2 && !isContinuingOnFirstMessage) {
            if (!lastMesString.endsWith('\n')) {
                lastMesString += '\n';
            }
            if (!isContinue || !(chat[chat.length - 1]?.is_user)) {
                lastMesString += `${name2}:`;
            }
        }

        return lastMesString;
    }

    /**
     * Scrubs already-generated text so it can be appended seamlessly:
     * drops a leading character-name prefix, drops a leading prompt bias,
     * then guarantees a separating space in non-instruct mode.
     * @param {string} promptCache Previously generated prompt text
     * @returns {string} Cleaned prompt cache
     */
    function cleanupPromptCache(promptCache) {
        const trimmed = promptCache.trimStart();

        // Drop the first occurrence of the character's name prefix.
        if (trimmed.startsWith(`${name2}:`)) {
            promptCache = promptCache.replace(`${name2}:`, '').trimStart();
        }

        // Drop the first occurrence of the prompt bias.
        if (promptCache.trimStart().startsWith(promptBias)) {
            promptCache = promptCache.replace(promptBias, '');
        }

        // Non-instruct prompts need a space separating the cache from what precedes it.
        if (!isInstruct && !/^\s/.test(promptCache)) {
            promptCache = ' ' + promptCache;
        }

        return promptCache;
    }

    /**
     * Recursively trims the prepared prompt until it fits this_max_context:
     * first drops unpinned example messages (count_exm_add), then drops the
     * oldest chat entries (mesSend), re-measuring after each removal.
     */
    async function checkPromptSize() {
        console.debug('---checking Prompt size');
        setPromptString();
        const prompt = [
            beforeScenarioAnchor,
            storyString,
            afterScenarioAnchor,
            mesExmString,
            mesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join(''),
            '\n',
            generatedPromptCache,
            quiet_prompt,
        ].join('').replace(/\r/gm, '');
        let thisPromptContextSize = await getTokenCountAsync(prompt, power_user.token_padding);

        if (thisPromptContextSize > this_max_context) {        //if the prepared prompt is larger than the max context size...
            if (count_exm_add > 0) {                            // ..and we have example mesages..
                count_exm_add--;                            // remove the example messages...
                await checkPromptSize();                            // and try agin...
            } else if (mesSend.length > 0) {                    // if the chat history is longer than 0
                mesSend.shift();                            // remove the first (oldest) chat entry..
                await checkPromptSize();                            // and check size again..
            } else {
                //end
                console.debug(`---mesSend.length = ${mesSend.length}`);
            }
        }
    }

    // With a non-empty generation cache (non-OAI), the prompt must be re-fitted;
    // otherwise a single pass over setPromptString suffices.
    if (generatedPromptCache.length > 0 && main_api !== 'openai') {
        console.debug('---Generated Prompt Cache length: ' + generatedPromptCache.length);
        await checkPromptSize();
    } else {
        console.debug('---calling setPromptString ' + generatedPromptCache.length);
        setPromptString();
    }

    // Fetches the combined prompt for both negative and positive prompts
    const cfgGuidanceScale = getGuidanceScale();
    // CFG is only in play when a guidance scale exists and is not exactly 1.
    const useCfgPrompt = cfgGuidanceScale && cfgGuidanceScale.value !== 1;

    // For prompt bit itemization
    let mesSendString = '';

    /**
     * Builds the final flattened prompt for text-completion style APIs,
     * applying CFG prompts, prompt bias, and prompt-cache cleanup.
     * @param {boolean} isNegative Whether to build the CFG negative prompt
     * @returns {string|undefined} Combined prompt; '' for Chat Completion,
     *   undefined when a negative prompt is requested but CFG is inactive
     */
    function getCombinedPrompt(isNegative) {
        // Only return if the guidance scale doesn't exist or the value is 1
        // Also don't return if constructing the neutral prompt
        if (isNegative && !useCfgPrompt) {
            return;
        }

        // OAI has its own prompt manager. No need to do anything here
        if (main_api === 'openai') {
            return '';
        }

        // Deep clone
        let finalMesSend = structuredClone(mesSend);

        if (useCfgPrompt) {
            const cfgPrompt = getCfgPrompt(cfgGuidanceScale, isNegative);
            if (cfgPrompt.value) {
                if (cfgPrompt.depth === 0) {
                    // Depth 0: append to the last message, space-separated if needed.
                    finalMesSend[finalMesSend.length - 1].message +=
                        /\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
                            ? cfgPrompt.value
                            : ` ${cfgPrompt.value}`;
                } else {
                    // TODO: Make all extension prompts use an array/splice method
                    // Deeper: attach as an extension prompt N messages from the end (clamped to 0).
                    const lengthDiff = mesSend.length - cfgPrompt.depth;
                    const cfgDepth = lengthDiff >= 0 ? lengthDiff : 0;
                    finalMesSend[cfgDepth].extensionPrompts.push(`${cfgPrompt.value}\n`);
                }
            }
        }

        // Add prompt bias after everything else
        // Always run with continue
        if (!isInstruct && !isImpersonate) {
            if (promptBias.trim().length !== 0) {
                finalMesSend[finalMesSend.length - 1].message +=
                    /\s/.test(finalMesSend[finalMesSend.length - 1].message.slice(-1))
                        ? promptBias.trimStart()
                        : ` ${promptBias.trimStart()}`;
            }
        }

        // Prune from prompt cache if it exists
        if (generatedPromptCache.length !== 0) {
            generatedPromptCache = cleanupPromptCache(generatedPromptCache);
        }

        // Flattens the multiple prompt objects to a string.
        const combine = () => {
            // Right now, everything is suffixed with a newline
            mesSendString = finalMesSend.map((e) => `${e.extensionPrompts.join('')}${e.message}`).join('');

            // add a custom dingus (if defined)
            mesSendString = addChatsSeparator(mesSendString);

            // add chat preamble
            mesSendString = addChatsPreamble(mesSendString);

            let combinedPrompt = beforeScenarioAnchor +
                storyString +
                afterScenarioAnchor +
                mesExmString +
                mesSendString +
                generatedPromptCache;

            combinedPrompt = combinedPrompt.replace(/\r/gm, '');

            if (power_user.collapse_newlines) {
                combinedPrompt = collapseNewlines(combinedPrompt);
            }

            return combinedPrompt;
        };

        // Mark which entries came from depth injections (indices count from the end).
        finalMesSend.forEach((item, i) => {
            item.injected = injectedIndices.includes(finalMesSend.length - i - 1);
        });

        let data = {
            api: main_api,
            combinedPrompt: null,
            description,
            personality,
            persona,
            scenario,
            char: name2,
            user: name1,
            worldInfoBefore,
            worldInfoAfter,
            beforeScenarioAnchor,
            afterScenarioAnchor,
            storyString,
            mesExmString,
            mesSendString,
            finalMesSend,
            generatedPromptCache,
            main: system,
            jailbreak,
            naiPreamble: nai_settings.preamble,
        };

        // Before returning the combined prompt, give available context related information to all subscribers.
        // NOTE(review): emitAndWait is not awaited here (this function is sync), so
        // subscribers presumably must mutate `data` synchronously — confirm.
        eventSource.emitAndWait(event_types.GENERATE_BEFORE_COMBINE_PROMPTS, data);

        // If one or multiple subscribers return a value, forfeit the responsibillity of flattening the context.
        return !data.combinedPrompt ? combine() : data.combinedPrompt;
    }

    let finalPrompt = getCombinedPrompt(false);

    // Let subscribers replace or post-process the combined prompt.
    const eventData = { prompt: finalPrompt, dryRun: dryRun };
    await eventSource.emit(event_types.GENERATE_AFTER_COMBINE_PROMPTS, eventData);
    finalPrompt = eventData.prompt;
    // TODO(review): leftover debug logging — consider removing before release.
    console.log(`--------finalPrompt`, finalPrompt)
    console.log(`--------888888888`)

    let maxLength = Number(amount_gen); // how many tokens the AI will be requested to generate
    let thisPromptBits = [];

    // Build the backend-specific request payload. Only the OpenAI-compatible
    // path is active; the other backends are currently commented out.
    let generate_data;
    switch (main_api) {
        // case 'koboldhorde':
        // case 'kobold':
        //     if (main_api == 'koboldhorde' && horde_settings.auto_adjust_response_length) {
        //         maxLength = Math.min(maxLength, adjustedParams.maxLength);
        //         maxLength = Math.max(maxLength, MIN_LENGTH); // prevent validation errors
        //     }

        //     generate_data = {
        //         prompt: finalPrompt,
        //         gui_settings: true,
        //         max_length: maxLength,
        //         max_context_length: max_context,
        //         api_server,
        //     };

        //     if (preset_settings != 'gui') {
        //         const isHorde = main_api == 'koboldhorde';
        //         const presetSettings = koboldai_settings[koboldai_setting_names[preset_settings]];
        //         const maxContext = (adjustedParams && horde_settings.auto_adjust_context_length) ? adjustedParams.maxContextLength : max_context;
        //         generate_data = getKoboldGenerationData(finalPrompt, presetSettings, maxLength, maxContext, isHorde, type);
        //     }
        //     break;
        // case 'textgenerationwebui': {
        //     const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale, negativePrompt: getCombinedPrompt(true) } : null;
        //     generate_data = getTextGenGenerationData(finalPrompt, maxLength, isImpersonate, isContinue, cfgValues, type);
        //     break;
        // }
        // case 'novel': {
        //     const cfgValues = useCfgPrompt ? { guidanceScale: cfgGuidanceScale } : null;
        //     const presetSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
        //     generate_data = getNovelGenerationData(finalPrompt, presetSettings, maxLength, isImpersonate, isContinue, cfgValues, type);
        //     break;
        // }
        case 'openai': {
            let [prompt, counts] = await prepareOpenAIMessages({
                name2: name2, // character name
                charDescription: description, // character description
                charPersonality: personality, // advanced definitions: personality summary
                Scenario: scenario, // advanced definitions: scenario
                worldInfoBefore: worldInfoBefore, //
                worldInfoAfter: worldInfoAfter,//
                extensionPrompts: extension_prompts,
                bias: promptBias,
                type: type,
                quietPrompt: quiet_prompt,
                quietImage: quietImage,
                cyclePrompt: cyclePrompt,
                systemPromptOverride: system,
                jailbreakPromptOverride: jailbreak,
                personaDescription: persona,
                messages: oaiMessages,
                messageExamples: oaiMessageExamples,
            }, dryRun);

            generate_data = { prompt: prompt };
            
            // generate_data = openaipromptData.openaiprompt
            // TODO: prepareOpenAIMessages was initially bypassed here in favor of the
            // mock data above; revisit and clean this up later.
            
            // TODO: move these side-effects somewhere else, so this switch-case solely sets generate_data
            // counts will return false if the user has not enabled the token breakdown feature
            if (counts) {
                parseTokenCounts(counts, thisPromptBits);
            }

            if (!dryRun) {
                setInContextMessages(openai_messages_count, type);
            }
            break;
        }
    }

    // Dry runs only build the prompt; nothing is sent.
    if (dryRun) {
        generatedPromptCache = '';
        return Promise.resolve();
    }

    /**
     * Dispatches the prepared request to the API, records prompt itemization
     * data, and either streams the response (returning a String tagged with
     * `fromStream`) or performs a plain generation request.
     * @returns {Promise<any>} Generation result data
     */
    async function finishGenerating() {
        if (power_user.console_log_prompts) {
            console.log(generate_data.prompt);
        }

        console.debug('rungenerate calling API');

        showStopButton();

        //set array object for prompt token itemization of this message
        let currentArrayEntry = Number(thisPromptBits.length - 1);
        let additionalPromptStuff = {
            ...thisPromptBits[currentArrayEntry],
            rawPrompt: generate_data.prompt || generate_data.input,
            mesId: getNextMessageId(type),
            allAnchors: getAllExtensionPrompts(),
            chatInjects: injectedIndices?.map(index => arrMes[arrMes.length - index - 1])?.join('') || '',
            summarizeString: (extension_prompts['1_memory']?.value || ''),
            authorsNoteString: (extension_prompts['2_floating_prompt']?.value || ''),
            smartContextString: (extension_prompts['chromadb']?.value || ''),
            worldInfoString: worldInfoString,
            storyString: storyString,
            beforeScenarioAnchor: beforeScenarioAnchor,
            afterScenarioAnchor: afterScenarioAnchor,
            examplesString: examplesString,
            mesSendString: mesSendString,
            generatedPromptCache: generatedPromptCache,
            promptBias: promptBias,
            finalPrompt: finalPrompt,
            charDescription: description,
            charPersonality: personality,
            scenarioText: scenario,
            this_max_context: this_max_context,
            padding: power_user.token_padding,
            main_api: main_api,
            instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
            userPersona: (power_user.persona_description || ''),
        };

        //console.log(additionalPromptStuff);
        // Upsert by message id so regenerations replace their prior itemization entry.
        const itemizedIndex = itemizedPrompts.findIndex((item) => item.mesId === additionalPromptStuff.mesId);

        if (itemizedIndex !== -1) {
            itemizedPrompts[itemizedIndex] = additionalPromptStuff;
        }
        else {
            itemizedPrompts.push(additionalPromptStuff);
        }

        console.debug(`pushed prompt bits to itemizedPrompts array. Length is now: ${itemizedPrompts.length}`);

        if (isStreamingEnabled() && type !== 'quiet') {
            streamingProcessor = new StreamingProcessor(type, force_name2, generation_started, message_already_generated);
            if (isContinue) {
                // Save reply does add cycle text to the prompt, so it's not needed here
                streamingProcessor.firstMessageText = '';
            }

            streamingProcessor.generator = await sendStreamingRequest(type, generate_data);

            hideSwipeButtons();
            let getMessage = await streamingProcessor.generate();
            let messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);

            if (isContinue) {
                getMessage = continue_mag + getMessage;
            }

            // NOTE(review): if the stream was stopped or never finished, this function
            // falls through and resolves to undefined; onSuccess guards with `if (!data)`.
            if (streamingProcessor && !streamingProcessor.isStopped && streamingProcessor.isFinished) {
                await streamingProcessor.onFinishStreaming(streamingProcessor.messageId, getMessage);
                streamingProcessor = null;
                triggerAutoContinue(messageChunk, isImpersonate);
                return Object.defineProperties(new String(getMessage), {
                    'messageChunk': { value: messageChunk },
                    'fromStream': { value: true },
                });
            }
        } else {
            return await sendGenerationRequest(type, generate_data);
        }
    }

    // Kick off the request and route fulfillment/rejection to the handlers below.
    return finishGenerating().then(onSuccess, onError);

    /**
     * Fulfillment handler for the generation request: extracts the message,
     * saves the reply, handles empty-response retries and auto-swipe, and
     * returns the final text as a String tagged with its `messageChunk`.
     * @param {any} data Raw response data from the API (or a stream passthrough)
     * @returns {Promise<any>} The generated message, the stream result, or undefined
     */
    async function onSuccess(data) {
        if (!data) return;

        // Streaming already finalized the message; pass the result through untouched.
        if (data?.fromStream) {
            return data;
        }

        let messageChunk = '';

        if (data.error) {
            unblockGeneration(type);
            generatedPromptCache = '';

            if (data?.response) {
                toastr.error(data.response, 'API Error');
            }
            throw data?.response;
        }

        //const getData = await response.json();
        let getMessage = extractMessageFromData(data);
        let title = extractTitleFromData(data);
        kobold_horde_model = title;

        const swipes = extractMultiSwipes(data, type);

        messageChunk = cleanUpMessage(getMessage, isImpersonate, isContinue, false);

        if (isContinue) {
            getMessage = continue_mag + getMessage;
        }

        //Formating
        const displayIncomplete = type === 'quiet' && !quietToLoud;
        getMessage = cleanUpMessage(getMessage, isImpersonate, isContinue, displayIncomplete);

        if (getMessage.length > 0 || data.allowEmptyResponse) {
            if (isImpersonate) {
                $('#send_textarea').val(getMessage)[0].dispatchEvent(new Event('input', { bubbles: true }));
                generatedPromptCache = '';
                await eventSource.emit(event_types.IMPERSONATE_READY, getMessage);
            }
            else if (type == 'quiet') {
                unblockGeneration(type);
                return getMessage;
            }
            else {
                // Without streaming we'll be having a full message on continuation. Treat it as a last chunk.
                if (originalType !== 'continue') {
                    ({ type, getMessage } = await saveReply(type, getMessage, false, title, swipes));
                }
                else {
                    ({ type, getMessage } = await saveReply('appendFinal', getMessage, false, title, swipes));
                }

                // This relies on `saveReply` having been called to add the message to the chat, so it must be last.
                parseAndSaveLogprobs(data, continue_mag);
            }

            if (type !== 'quiet') {
                playMessageSound();
            }
        } else {
            // Empty response: retry generation with a bounded loop counter.
            // If maxLoops is not passed in (e.g. first time generating), set it to MAX_GENERATION_LOOPS
            maxLoops ??= MAX_GENERATION_LOOPS;

            if (maxLoops === 0) {
                if (type !== 'quiet') {
                    throwCircuitBreakerError();
                }
                throw new Error('Generate circuit breaker interruption');
            }

            // regenerate with character speech reenforced
            // to make sure we leave on swipe type while also adding the name2 appendage
            await delay(1000);
            // A message was already deleted on regeneration, so instead treat is as a normal gen
            if (type === 'regenerate') {
                type = 'normal';
            }
            // The first await is for waiting for the generate to start. The second one is waiting for it to finish
            // NOTE(review): `await await` collapses to a single await for an ordinary Promise; kept as-is.
            const result = await await Generate(type, { automatic_trigger, force_name2: true, quiet_prompt, quietToLoud, skipWIAN, force_chid, signal, quietImage, quietName, maxLoops: maxLoops - 1 });
            return result;
        }

        if (power_user.auto_swipe) {
            console.debug('checking for autoswipeblacklist on non-streaming message');
            // Returns true when the message contains at least `threshold` blacklisted words.
            function containsBlacklistedWords(getMessage, blacklist, threshold) {
                console.debug('checking blacklisted words');
                const regex = new RegExp(`\\b(${blacklist.join('|')})\\b`, 'gi');
                const matches = getMessage.match(regex) || [];
                return matches.length >= threshold;
            }

            const generatedTextFiltered = (getMessage) => {
                if (power_user.auto_swipe_blacklist_threshold) {
                    if (containsBlacklistedWords(getMessage, power_user.auto_swipe_blacklist, power_user.auto_swipe_blacklist_threshold)) {
                        console.debug('Generated text has blacklisted words');
                        return true;
                    }
                }

                return false;
            };
            if (generatedTextFiltered(getMessage)) {
                console.debug('swiping right automatically');
                is_send_press = false;
                swipe_right();
                // TODO: do we want to resolve after an auto-swipe?
                return;
            }
        }

        console.debug('/api/chats/save called by /Generate');
        await saveChatConditional();
        unblockGeneration(type);
        streamingProcessor = null;

        if (type !== 'quiet') {
            triggerAutoContinue(messageChunk, isImpersonate);
        }

        // Don't break the API chain that expects a single string in return
        return Object.defineProperty(new String(getMessage), 'messageChunk', { value: messageChunk });
    }

    /**
     * Failure path for the generation request: surfaces a toast for API errors,
     * resets generation state, and rethrows so the caller's chain rejects.
     * @param {any} exception Error raised while generating
     * @throws Always rethrows the received exception
     */
    function onError(exception) {
        const apiMessage = exception?.error?.message;
        if (typeof apiMessage === 'string') {
            toastr.error(apiMessage, 'Error', { timeOut: 10000, extendedTimeOut: 20000 });
        }

        // Reset generation state before propagating the failure.
        generatedPromptCache = '';
        unblockGeneration(type);
        console.log(exception);
        streamingProcessor = null;
        throw exception;
    }
}
/**
 * Formats the message text into an HTML string using Markdown and other formatting.
 * @param {string} mes Message text
 * @param {string} ch_name Character name
 * @param {boolean} isSystem If the message was sent by the system
 * @param {boolean} isUser If the message was sent by the user
 * @param {number} messageId Message index in chat array
 * @returns {string} Sanitized HTML string
 */
export function messageFormatting(mes, ch_name, isSystem, isUser, messageId) {
  if (!mes) {
      return '';
  }

  // The very first character message may still contain unsubstituted {{macros}}
  if (Number(messageId) === 0 && !isSystem && !isUser) {
      mes = substituteParams(mes, undefined, ch_name);
  }

  // Module-level stash of the raw text; presumably consumed by the showdown
  // exclusion extension during markdown conversion — TODO confirm at its usage site.
  mesForShowdownParse = mes;

  // Force isSystem = false on comment messages so they get formatted properly
  if (ch_name === COMMENT_NAME_DEFAULT && isSystem && !isUser) {
      isSystem = false;
  }

  // Let hidden messages have markdown
  if (isSystem && ch_name !== systemUserName) {
      isSystem = false;
  }

  // Prompt bias replacement should be applied on the raw message
  if (!power_user.show_user_prompt_bias && ch_name && !isUser && !isSystem) {
      mes = mes.replaceAll(substituteParams(power_user.user_prompt_bias), '');
  }

  if (!isSystem) {
      // Picks which regex-script placement applies to this message
      function getRegexPlacement() {
          try {
              if (isUser) {
                  return regex_placement.USER_INPUT;
              } else if (chat[messageId]?.extra?.type === 'narrator') {
                  return regex_placement.SLASH_COMMAND;
              } else {
                  return regex_placement.AI_OUTPUT;
              }
          } catch {
              return regex_placement.AI_OUTPUT;
          }
      }

      const regexPlacement = getRegexPlacement();
      // Depth = number of non-system messages that come after this one in the chat
      const usableMessages = chat.map((x, index) => ({ message: x, index: index })).filter(x => !x.message.is_system);
      const indexOf = usableMessages.findIndex(x => x.index === Number(messageId));
      const depth = messageId >= 0 && indexOf !== -1 ? (usableMessages.length - indexOf - 1) : undefined;

      // Always override the character name
      mes = getRegexedString(mes, regexPlacement, {
          characterOverride: ch_name,
          isMarkdown: true,
          depth: depth,
      });
  }

  if (power_user.auto_fix_generated_markdown) {
      mes = fixMarkdown(mes, true);
  }

  // Optionally neutralize raw HTML in the message before any rendering
  if (!isSystem && power_user.encode_tags) {
      mes = mes.replaceAll('<', '&lt;').replaceAll('>', '&gt;');
  }

  if (this_chid === undefined && !selected_group) {
      // No character or group selected: only minimal bold formatting is applied
      mes = mes.replace(/\*\*(.+?)\*\*/g, '<b>$1</b>');
  } else if (!isSystem) {
      // Save double quotes in tags as a special character to prevent them from being encoded
      if (!power_user.encode_tags) {
          mes = mes.replace(/<([^>]+)>/g, function (_, contents) {
              return '<' + contents.replace(/"/g, '\ufffe') + '>';
          });
      }

      // Wrap quoted speech in <q>; the code-span alternatives match first so
      // quotes inside backticks are returned unchanged
      mes = mes.replace(/```[\s\S]*?```|``[\s\S]*?``|`[\s\S]*?`|(".+?")|(\u201C.+?\u201D)/gm, function (match, p1, p2) {
          if (p1) {
              return '<q>"' + p1.replace(/"/g, '') + '"</q>';
          } else if (p2) {
              return '<q>“' + p2.replace(/\u201C|\u201D/g, '') + '”</q>';
          } else {
              return match;
          }
      });

      // Restore double quotes in tags
      if (!power_user.encode_tags) {
          mes = mes.replace(/\ufffe/g, '"');
      }

      // Normalize LaTeX align* environments into display-math delimiters
      mes = mes.replaceAll('\\begin{align*}', '$$');
      mes = mes.replaceAll('\\end{align*}', '$$');
      // Markdown -> HTML conversion
      mes = converter.makeHtml(mes);

      mes = mes.replace(/<code(.*)>[\s\S]*?<\/code>/g, function (match) {
          // Firefox creates extra newlines from <br>s in code blocks, so we replace them before converting newlines to <br>s.
          return match.replace(/\n/gm, '\u0000');
      });
      mes = mes.replace(/\u0000/g, '\n'); // Restore converted newlines
      mes = mes.trim();

      // Un-escape ampersands inside code blocks so code renders verbatim
      mes = mes.replace(/<code(.*)>[\s\S]*?<\/code>/g, function (match) {
          return match.replace(/&amp;/g, '&');
      });
  }

  /*
  // Hides bias from empty messages send with slash commands
  if (isSystem) {
      mes = mes.replace(/\{\{[\s\S]*?\}\}/gm, "");
  }
  */

  // Strip leading "CharName:" prefixes unless the user opted to keep them
  if (!power_user.allow_name2_display && ch_name && !isUser && !isSystem) {
      mes = mes.replace(new RegExp(`(^|\n)${escapeRegex(ch_name)}:`, 'g'), '$1');
  }

  /** @type {any} */
  const config = { MESSAGE_SANITIZE: true, ADD_TAGS: ['custom-style'] };
  // Final sanitize pass; <custom-style> blocks are encoded around it so they survive
  mes = encodeStyleTags(mes);
  mes = DOMPurify.sanitize(mes, config);
  mes = decodeStyleTags(mes);

  return mes;
}


/**
 * Displays a blocking popup with a given text and type.
 * @param {JQuery<HTMLElement>|string|Element} text - Text to display in the popup.
 * @param {string} type - Popup type; when truthy, replaces the current popup_type.
 * @param {string} inputValue - Value to set the input to.
 * @param {PopupOptions} options - Options for the popup.
 * @typedef {{okButton?: string, rows?: number, wide?: boolean, large?: boolean, allowHorizontalScrolling?: boolean, allowVerticalScrolling?: boolean, cropAspect?: number }} PopupOptions - Options for the popup.
 * @returns {Promise<any>} Resolves with the popup result when the dialog is closed.
 */
export function callPopup(text, type, inputValue = '', { okButton, rows, wide, large, allowHorizontalScrolling, allowVerticalScrolling, cropAspect } = {}) {
  dialogueCloseStop = true;
  if (type) {
      popup_type = type;
  }

  const $dialoguePopup = $('#dialogue_popup');
  const $dialoguePopupCancel = $('#dialogue_popup_cancel');
  const $dialoguePopupOk = $('#dialogue_popup_ok');
  const $dialoguePopupInput = $('#dialogue_popup_input');
  const $dialoguePopupText = $('#dialogue_popup_text');
  const $shadowPopup = $('#shadow_popup');

  // Resolves the OK button caption for the current popup type.
  // Side effect: text-style popups hide the cancel button entirely.
  const resolveOkButtonText = () => {
      if (popup_type === 'avatarToCrop') {
          return okButton ?? 'Accept';
      }
      if (popup_type === 'text' || popup_type === 'alternate_greeting' || popup_type === 'char_not_selected') {
          $dialoguePopupCancel.css('display', 'none');
          return okButton ?? 'Ok';
      }
      if (popup_type === 'delete_extension') {
          return okButton ?? 'Ok';
      }
      if (popup_type === 'new_chat' || popup_type === 'confirm') {
          return okButton ?? 'Yes';
      }
      if (popup_type === 'input') {
          return okButton ?? 'Save';
      }
      return okButton ?? 'Delete';
  };

  // Apply sizing/scrolling modifiers
  $dialoguePopup.toggleClass('wide_dialogue_popup', !!wide);
  $dialoguePopup.toggleClass('large_dialogue_popup', !!large);
  $dialoguePopup.toggleClass('horizontal_scrolling_dialogue_popup', !!allowHorizontalScrolling);
  $dialoguePopup.toggleClass('vertical_scrolling_dialogue_popup', !!allowVerticalScrolling);

  // Cancel is shown first; resolveOkButtonText may hide it again for text popups
  $dialoguePopupCancel.css('display', 'inline-block');
  $dialoguePopupOk.text(resolveOkButtonText());
  $dialoguePopupInput.toggle(popup_type === 'input').val(inputValue).attr('rows', rows ?? 1);
  $dialoguePopupText.empty().append(text);
  $shadowPopup.css('display', 'block');

  if (popup_type === 'input') {
      $dialoguePopupInput.trigger('focus');
  }

  if (popup_type === 'avatarToCrop') {
      // unset existing data
      crop_data = undefined;

      $('#avatarToCrop').cropper({
          aspectRatio: cropAspect ?? 2 / 3,
          autoCropArea: 1,
          viewMode: 2,
          rotatable: false,
          crop: function (event) {
              crop_data = event.detail;
              crop_data.want_resize = !power_user.never_resize_avatars;
          },
      });
  }

  // Fade the shadow layer in
  $shadowPopup.transition({
      opacity: 1,
      duration: animation_duration,
      easing: animation_easing,
  });

  // The resolver is stored globally and invoked by the popup's close handlers
  return new Promise((resolve) => {
      dialogueResolve = resolve;
  });
}

/**
 * Inserts a user message into the chat history.
 * @param {string} messageText Message text.
 * @param {string} messageBias Message bias.
 * @param {number} [insertAt] Optional index to insert the message at.
 * @param {boolean} [compact] Send as a compact display message.
 * @param {string} [name] Name of the user sending the message. Defaults to name1.
 * @param {string} [avatar] Avatar of the user sending the message. Defaults to user_avatar.
 * @returns {Promise<void>} A promise that resolves when the message is inserted.
 */
export async function sendMessageAsUser(messageText, messageBias, insertAt = null, compact = false, name = name1, avatar = user_avatar) {
  const regexedText = getRegexedString(messageText, regex_placement.USER_INPUT);

  const message = {
      name,
      is_user: true,
      is_system: false,
      send_date: getMessageTimeStamp(),
      mes: substituteParams(regexedText),
      extra: {
          isSmallSys: compact,
      },
  };

  if (power_user.message_token_count_enabled) {
      message.extra.token_count = await getTokenCountAsync(message.mes, 0);
  }

  // Lock user avatar to a persona.
  if (avatar in power_user.personas) {
      message.force_avatar = getUserAvatar(avatar);
  }

  if (messageBias) {
      message.extra.bias = messageBias;
      message.mes = removeMacros(message.mes);
  }

  await populateFileAttachment(message);
  statMesProcess(message, 'user', characters, this_chid, '');

  const isValidInsertIndex = typeof insertAt === 'number' && insertAt >= 0 && insertAt <= chat.length;
  if (isValidInsertIndex) {
      // Inserting mid-chat requires a full reload to re-render message ids
      chat.splice(insertAt, 0, message);
      await saveChatConditional();
      await eventSource.emit(event_types.MESSAGE_SENT, insertAt);
      await reloadCurrentChat();
      await eventSource.emit(event_types.USER_MESSAGE_RENDERED, insertAt);
      return;
  }

  // Appending: render just the one new message
  chat.push(message);
  const chat_id = (chat.length - 1);
  await eventSource.emit(event_types.MESSAGE_SENT, chat_id);
  addOneMessage(message);
  await eventSource.emit(event_types.USER_MESSAGE_RENDERED, chat_id);
}
/**
 * Applies syntax highlighting to all code blocks inside a message element and,
 * when the Clipboard API is available, adds a "copy to clipboard" button to each.
 * @param {HTMLElement|JQuery<HTMLElement>} messageElement Message element to process.
 */
export function addCopyToCodeBlocks(messageElement) {
  const codeBlocks = $(messageElement).find('pre code');
  for (let i = 0; i < codeBlocks.length; i++) {
      hljs.highlightElement(codeBlocks.get(i));
      if (navigator.clipboard !== undefined) {
          const copyButton = document.createElement('i');
          copyButton.classList.add('fa-solid', 'fa-copy', 'code-copy');
          copyButton.title = 'Copy code';
          codeBlocks.get(i).appendChild(copyButton);
          copyButton.addEventListener('pointerup', function (event) {
              // Don't leave the clipboard promise floating: writeText can reject
              // (e.g. document not focused, permission denied). Only report
              // success when the write actually completed.
              navigator.clipboard.writeText(codeBlocks.get(i).innerText)
                  .then(() => toastr.info('Copied!', '', { timeOut: 2000 }))
                  .catch((err) => console.error('Could not copy code block to clipboard', err));
          });
      }
  }
}
/**
 * Renders a single chat message object into the chat DOM.
 * @param {object} mes Chat message object to render.
 * @param {object} [options] Render options.
 * @param {string} [options.type] Render type ('normal' or 'swipe').
 * @param {number|null} [options.insertAfter] Message id to insert the rendered element after.
 * @param {boolean} [options.scroll] Whether to scroll the chat to the bottom afterwards.
 * @param {number|null} [options.insertBefore] Message id to insert the rendered element before.
 * @param {number|null} [options.forceId] Forced message id for the rendered element.
 * @param {boolean} [options.showSwipes] Whether to refresh swipe buttons and last_mes classes.
 */
export function addOneMessage(mes, { type = 'normal', insertAfter = null, scroll = true, insertBefore = null, forceId = null, showSwipes = true } = {}) {
    let messageText = mes['mes'];
    const momentDate = timestampToMoment(mes.send_date);
    const timestamp = momentDate.isValid() ? momentDate.format('LL LT') : '';

    // Prefer the display override (processed/alternate text) when present
    if (mes?.extra?.display_text) {
        messageText = mes.extra.display_text;
    }

    // Forbidden black magic
    // This allows to use "continue" on user messages
    if (type === 'swipe' && mes.swipe_id === undefined) {
        mes.swipe_id = 0;
        mes.swipes = [mes.mes];
    }

    let avatarImg = getUserAvatar(user_avatar);
    const isSystem = mes.is_system;
    const title = mes.title;
    generatedPromptCache = '';

    //for non-user mesages
    if (!mes['is_user']) {
        if (mes.force_avatar) {
            avatarImg = mes.force_avatar;
        } else if (this_chid === undefined) {
            avatarImg = system_avatar;
        } else {
            if (characters[this_chid].avatar != 'none') {
                avatarImg = getThumbnailUrl('avatar', characters[this_chid].avatar);
            } else {
                avatarImg = default_avatar;
            }
        }
        //old processing:
        //if messge is from sytem, use the name provided in the message JSONL to proceed,
        //if not system message, use name2 (char's name) to proceed
        //characterName = mes.is_system || mes.force_avatar ? mes.name : name2;
    } else if (mes['is_user'] && mes['force_avatar']) {
        // Special case for persona images.
        avatarImg = mes['force_avatar'];
    }

    // Convert the raw message text (and bias) to sanitized HTML
    messageText = messageFormatting(
        messageText,
        mes.name,
        isSystem,
        mes.is_user,
        chat.indexOf(mes),
    );
    const bias = messageFormatting(mes.extra?.bias ?? '', '', false, false, -1);
    let bookmarkLink = mes?.extra?.bookmark_link ?? '';

    // Parameters handed to the message template renderer
    let params = {
        mesId: forceId ?? chat.length - 1,
        swipeId: mes.swipe_id ?? 0,
        characterName: mes.name,
        isUser: mes.is_user,
        avatarImg: avatarImg,
        bias: bias,
        isSystem: isSystem,
        title: title,
        bookmarkLink: bookmarkLink,
        forceAvatar: mes.force_avatar,
        timestamp: timestamp,
        extra: mes.extra,
        tokenCount: mes.extra?.token_count ?? 0,
        ...formatGenerationTimer(mes.gen_started, mes.gen_finished, mes.extra?.token_count),
    };

    const renderedMessage = getMessageFromTemplate(params);

    // Swipes reuse the existing message element; everything else gets a new one
    if (type !== 'swipe') {
        if (!insertAfter && !insertBefore) {
            chatElement.append(renderedMessage);
        }
        else if (insertAfter) {
            const target = chatElement.find(`.mes[mesid="${insertAfter}"]`);
            $(renderedMessage).insertAfter(target);
        } else {
            const target = chatElement.find(`.mes[mesid="${insertBefore}"]`);
            $(renderedMessage).insertBefore(target);
        }
    }

    // Callers push the new message to chat before calling addOneMessage
    const newMessageId = typeof forceId == 'number' ? forceId : chat.length - 1;

    const newMessage = $(`#chat [mesid="${newMessageId}"]`);
    const isSmallSys = mes?.extra?.isSmallSys;

    if (isSmallSys === true) {
        newMessage.addClass('smallSysMes');
    }

    //shows or hides the Prompt display button
    let mesIdToFind = type == 'swipe' ? params.mesId - 1 : params.mesId;  //Number(newMessage.attr('mesId'));

    //if we have itemized messages, and the array isn't null..
    if (params.isUser === false && Array.isArray(itemizedPrompts) && itemizedPrompts.length > 0) {
        const itemizedPrompt = itemizedPrompts.find(x => Number(x.mesId) === Number(mesIdToFind));
        if (itemizedPrompt) {
            newMessage.find('.mes_prompt').show();
        }
    }

    // Replace broken avatar images with a placeholder icon
    newMessage.find('.avatar img').on('error', function () {
        $(this).hide();
        $(this).parent().html('<div class="missing-avatar fa-solid fa-user-slash"></div>');
    });

    if (type === 'swipe') {
        // Update the last message's element in place instead of appending
        const swipeMessage = chatElement.find(`[mesid="${chat.length - 1}"]`);
        swipeMessage.find('.mes_text').html(messageText).attr('title', title);
        swipeMessage.find('.timestamp').text(timestamp).attr('title', `${params.extra.api} - ${params.extra.model}`);
        appendMediaToMessage(mes, swipeMessage);
        if (power_user.timestamp_model_icon && params.extra?.api) {
            insertSVGIcon(swipeMessage, params.extra);
        }

        // Timer/token info is only shown for the latest swipe
        if (mes.swipe_id == mes.swipes.length - 1) {
            swipeMessage.find('.mes_timer').text(params.timerValue).attr('title', params.timerTitle);
            swipeMessage.find('.tokenCounterDisplay').text(`${params.tokenCount}t`);
        } else {
            swipeMessage.find('.mes_timer').empty();
            swipeMessage.find('.tokenCounterDisplay').empty();
        }
    } else {
        const messageId = forceId ?? chat.length - 1;
        chatElement.find(`[mesid="${messageId}"] .mes_text`).append(messageText);
        appendMediaToMessage(mes, newMessage);
        showSwipes && hideSwipeButtons();
    }

    addCopyToCodeBlocks(newMessage);

    if (showSwipes) {
        $('#chat .mes').last().addClass('last_mes');
        $('#chat .mes').eq(-2).removeClass('last_mes');
        hideSwipeButtons();
        showSwipeButtons();
    }

    // Don't scroll if not inserting last
    if (!insertAfter && !insertBefore && scroll) {
        scrollChatToBottom();
    }
}
/**
 * Formats the title for the generation timer.
 * @param {Date} gen_started Date when generation was started
 * @param {Date} gen_finished Date when generation was finished
 * @param {number} tokenCount Number of tokens generated (0 if not available)
 * @returns {Object} Object containing the formatted timer value and title
 * @example
 * const { timerValue, timerTitle } = formatGenerationTimer(gen_started, gen_finished, tokenCount);
 * console.log(timerValue); // 1.2s
 * console.log(timerTitle); // Generation queued: 12:34:56 7 Jan 2021\nReply received: 12:34:57 7 Jan 2021\nTime to generate: 1.2 seconds\nToken rate: 5 t/s
 */
function formatGenerationTimer(gen_started, gen_finished, tokenCount) {
  if (!gen_started || !gen_finished) {
      return {};
  }

  const dateFormat = 'HH:mm:ss D MMM YYYY';
  const start = moment(gen_started);
  const finish = moment(gen_finished);
  const seconds = finish.diff(start, 'seconds', true);
  const timerValue = `${seconds.toFixed(1)}s`;
  // Drop the empty token-rate entry so the title doesn't end in a dangling newline
  // when no token count is available.
  const timerTitle = [
      `Generation queued: ${start.format(dateFormat)}`,
      `Reply received: ${finish.format(dateFormat)}`,
      `Time to generate: ${seconds} seconds`,
      tokenCount > 0 ? `Token rate: ${Number(tokenCount / seconds).toFixed(1)} t/s` : '',
  ].filter(Boolean).join('\n');

  // Invalid or negative durations (e.g. clock skew) get no timer value, but keep the title
  if (isNaN(seconds) || seconds < 0) {
      return { timerValue: '', timerTitle };
  }

  return { timerValue, timerTitle };
}

/**
 * Attaches any image or file extras of a message object to its rendered element.
 * @param {object} mes Chat message object.
 * @param {JQuery<HTMLElement>} messageElement Rendered message element to decorate.
 */
export function appendMediaToMessage(mes, messageElement) {
  // Add image to message
  const imageSource = mes.extra?.image;
  if (imageSource) {
      const heightBefore = $('#chat').prop('scrollHeight');
      const imageElement = messageElement.find('.mes_img');
      const textElement = messageElement.find('.mes_text');
      const inline = !!mes.extra?.inline_image;

      // Keep the visible scroll position stable once the image's height is known
      imageElement.on('load', function () {
          const scrollBefore = $('#chat').scrollTop();
          const heightAfter = $('#chat').prop('scrollHeight');
          $('#chat').scrollTop(scrollBefore + (heightAfter - heightBefore));
      });

      imageElement.attr('src', imageSource);
      imageElement.attr('title', mes.extra?.title || mes.title || '');
      messageElement.find('.mes_img_container').addClass('img_extra');
      imageElement.toggleClass('img_inline', inline);
      textElement.toggleClass('displayNone', !inline);
  }

  // Add file to message: any stale container is removed either way,
  // then a fresh one is built from the template when a file is present
  messageElement.find('.mes_file_container').remove();
  const attachedFile = mes.extra?.file;
  if (attachedFile) {
      const messageId = messageElement.attr('mesid');
      const template = $('#message_file_template .mes_file_container').clone();
      template.find('.mes_file_name').text(attachedFile.name);
      template.find('.mes_file_size').text(humanFileSize(attachedFile.size));
      template.find('.mes_file_download').attr('mesid', messageId);
      template.find('.mes_file_delete').attr('mesid', messageId);
      messageElement.find('.mes_block').append(template);
  }
}

/**
 * Persists the current application settings.
 * Currently a stub: the original POST to /api/settings/save is kept commented
 * out below for reference, so only the readiness guard actually runs.
 * @param {string} type Save type hint (unused by the stub).
 * @returns {Promise<void>}
 */
export async function saveSettings(type) {
  if (!settingsReady) {
    console.warn('Settings not ready, aborting save');
    return;
  }
  // TODO: save the settings here (server persistence is currently disabled)
  //console.log('Entering settings with name1 = '+name1);
  // return jQuery.ajax({
  //     type: 'POST',
  //     url: '/api/settings/save',
  //     data: JSON.stringify({
  //         firstRun: firstRun,
  //         currentVersion: currentVersion,
  //         username: name1,
  //         active_character: active_character,
  //         active_group: active_group,
  //         api_server: api_server,
  //         preset_settings: preset_settings,
  //         user_avatar: user_avatar,
  //         amount_gen: amount_gen,
  //         max_context: max_context,
  //         main_api: main_api,
  //         world_info_settings: getWorldInfoSettings(),
  //         textgenerationwebui_settings: textgen_settings,
  //         swipes: swipes,
  //         horde_settings: horde_settings,
  //         power_user: power_user,
  //         extension_settings: extension_settings,
  //         tags: tags,
  //         tag_map: tag_map,
  //         nai_settings: nai_settings,
  //         kai_settings: kai_settings,
  //         oai_settings: oai_settings,
  //         background: background_settings,
  //         proxies: proxies,
  //         selected_proxy: selected_proxy,
  //     }, null, 4),
  //     beforeSend: function () { },
  //     cache: false,
  //     dataType: 'json',
  //     contentType: 'application/json',
  //     //processData: false,
  //     success: async function (data) {
  //         eventSource.emit(event_types.SETTINGS_UPDATED);
  //     },
  //     error: function (jqXHR, exception) {
  //         toastr.error('Check the server connection and reload the page to prevent data loss.', 'Settings could not be saved');
  //         console.log(exception);
  //         console.log(jqXHR);
  //     },
  // });
}

/**
 * Inserts or replaces an SVG icon adjacent to the provided message's timestamp.
 *
 * If the `extra.api` is "openai" and `extra.model` contains the substring "claude",
 * the function fetches the "claude.svg". Otherwise, it fetches the SVG named after
 * the value in `extra.api`.
 *
 * @param {JQuery<HTMLElement>} mes - The message element containing the timestamp where the icon should be inserted or replaced.
 * @param {Object} extra - Contains the API and model details.
 * @param {string} extra.api - The name of the API, used to determine which SVG to fetch.
 * @param {string} extra.model - The model name, used to check for the substring "claude".
 */
function insertSVGIcon(mes, extra) {
  const isOpenAi = extra.api === 'openai';
  const lowerModel = extra.model?.toLowerCase();

  // Pick the SVG file name from the backing API/model combination
  let iconName;
  if (isOpenAi && lowerModel?.includes('claude')) {
      // Claude on OpenRouter or Anthropic
      iconName = 'claude';
  } else if (isOpenAi && lowerModel?.includes('openai')) {
      // OpenAI on OpenRouter
      iconName = 'openai';
  } else if (isOpenAi && (extra.model === null || lowerModel?.includes('/'))) {
      // OpenRouter website model or other models
      iconName = 'openrouter';
  } else {
      // Everything else: icon named after the API itself
      iconName = extra.api;
  }

  const image = new Image();
  // Add classes for styling and identification
  image.classList.add('icon-svg', 'timestamp-icon');
  image.src = `/img/${iconName}.svg`;
  image.title = `${extra?.api ? extra.api + ' - ' : ''}${extra?.model ?? ''}`;

  image.onload = async function () {
      // Replace an existing icon next to the timestamp, or append a new one
      const existingIcon = mes.find('.timestamp').next('.timestamp-icon');
      if (existingIcon.length) {
          existingIcon.replaceWith(image);
      } else {
          mes.find('.timestamp').after(image);
      }

      await SVGInject(this);
  };
}

/**
 * Builds a message DOM element from the message template.
 * @param {object} params Destructured template parameters (ids, names, avatar, timer and token info).
 * @returns {JQuery<HTMLElement>} Populated message element, not yet attached to the DOM.
 */
function getMessageFromTemplate({
  mesId,
  swipeId,
  characterName,
  isUser,
  avatarImg,
  bias,
  isSystem,
  title,
  timerValue,
  timerTitle,
  bookmarkLink,
  forceAvatar,
  timestamp,
  tokenCount,
  extra,
}) {
  const mes = messageTemplate.clone();
  // Identifying attributes used by selectors elsewhere (e.g. [mesid="..."])
  mes.attr({
      'mesid': mesId,
      'swipeid': swipeId,
      'ch_name': characterName,
      'is_user': isUser,
      'is_system': !!isSystem,
      'bookmark_link': bookmarkLink,
      'force_avatar': !!forceAvatar,
      'timestamp': timestamp,
  });
  mes.find('.avatar img').attr('src', avatarImg);
  mes.find('.ch_name .name_text').text(characterName);
  mes.find('.mes_bias').html(bias);
  mes.find('.timestamp').text(timestamp).attr('title', `${extra?.api ? extra.api + ' - ' : ''}${extra?.model ?? ''}`);
  mes.find('.mesIDDisplay').text(`#${mesId}`);
  // Optional decorations: only set when the corresponding value is truthy
  tokenCount && mes.find('.tokenCounterDisplay').text(`${tokenCount}t`);
  title && mes.attr('title', title);
  timerValue && mes.find('.mes_timer').attr('title', timerTitle).text(timerValue);

  if (power_user.timestamp_model_icon && extra?.api) {
      insertSVGIcon(mes, extra);
  }

  return mes;
}

/**
 * Returns the character card fields for the current character.
 * All fields default to empty strings when no character is selected.
 * @returns {{system: string, mesExamples: string, description: string, personality: string, persona: string, scenario: string, jailbreak: string, version: string}}
 */
export function getCharacterCardFields() {
  const result = { system: '', mesExamples: '', description: '', personality: '', persona: '', scenario: '', jailbreak: '', version: '' };
  const character = characters[this_chid];

  if (!character) {
    return result;
  }

  // Chat-bound scenario override takes precedence over the card's scenario.
  const scenarioText = chat_metadata['scenario'] || characters[this_chid]?.scenario;
  result.description = baseChatReplace(characters[this_chid].description?.trim(), name1, name2);
  result.personality = baseChatReplace(characters[this_chid].personality?.trim(), name1, name2);
  // Optional chaining, consistent with the sibling fields: scenarioText is
  // undefined when neither the chat metadata nor the card define a scenario,
  // and a bare .trim() would throw here.
  result.scenario = baseChatReplace(scenarioText?.trim(), name1, name2);
  result.mesExamples = baseChatReplace(characters[this_chid].mes_example?.trim(), name1, name2);
  result.persona = baseChatReplace(power_user.persona_description?.trim(), name1, name2);
  result.system = power_user.prefer_character_prompt ? baseChatReplace(characters[this_chid].data?.system_prompt?.trim(), name1, name2) : '';
  result.jailbreak = power_user.prefer_character_jailbreak ? baseChatReplace(characters[this_chid].data?.post_history_instructions?.trim(), name1, name2) : '';
  result.version = characters[this_chid].data?.character_version ?? '';

  // if (selected_group) {
  //     const groupCards = getGroupCharacterCards(selected_group, Number(this_chid));

  //     if (groupCards) {
  //         result.description = groupCards.description;
  //         result.personality = groupCards.personality;
  //         result.scenario = groupCards.scenario;
  //         result.mesExamples = groupCards.mesExamples;
  //     }
  // }

  return result;
}

/**
 * Pings the ST server to check if it is reachable.
 * NOTE: currently mocked for local use — any completed fetch counts as
 * reachable; only a thrown fetch error (network failure) returns false.
 * @returns {Promise<boolean>} True if the server is reachable, false otherwise.
 */
export async function pingServer() {
  try {
    const result = await fetch('api/ping', {
      method: 'GET',
      headers: getRequestHeaders(),
    });
    // Local mock: report success unconditionally; the status check below is intentionally disabled.
    // if (!result.ok) {
    //     return true;
    // }

    return true;
  } catch (error) {
    console.error('Error pinging server', error);
    return false;
  }
}
/**
 * Hides the swipe arrows on chat messages.
 * Currently a no-op: the hiding logic is disabled (kept below for reference).
 */
export function hideSwipeButtons() {
  //console.log('hideswipebuttons entered');
  // $('#chat').find('.swipe_right').css('display', 'none');
  // $('#chat').find('.swipe_left').css('display', 'none');
}

/**
 * Applies macro substitution and newline normalization to a character card field.
 * @param {string} value Raw field text (may be undefined or empty).
 * @param {string} name1 User name for substitution.
 * @param {string} name2 Character name for substitution.
 * @returns {string} Processed text, or the input unchanged when undefined/empty.
 */
export function baseChatReplace(value, name1, name2) {
  // Undefined or empty values pass through untouched
  if (value === undefined || value.length === 0) {
    return value;
  }

  const noArg = undefined;
  // Substitute macros without replacing character card fields (avoids recursion)
  let processed = substituteParams(value, name1, name2, noArg, noArg, false);

  if (power_user.collapse_newlines) {
    processed = collapseNewlines(processed);
  }

  // Strip carriage returns
  return processed.replace(/\r/g, '');
}

/**
 * Substitutes {{macro}} parameters in a string.
 * @param {string} content - The string to substitute parameters in.
 * @param {string} [_name1] - The name of the user. Uses global name1 if not provided.
 * @param {string} [_name2] - The name of the character. Uses global name2 if not provided.
 * @param {string} [_original] - The original message for {{original}} substitution.
 * @param {string} [_group] - The group members list for {{group}} substitution.
 * @param {boolean} [_replaceCharacterCard] - Whether to replace character card macros.
 * @returns {string} The string with substituted parameters.
 */
export function substituteParams(content, _name1, _name2, _original, _group, _replaceCharacterCard = true) {
  // Macro name -> value (or lazy getter) map handed to the macro evaluator.
  const environment = {};

  if (typeof _original === 'string') {
    // {{original}} may only be expanded once; subsequent occurrences become empty.
    let originalSubstituted = false;
    environment.original = () => {
      if (originalSubstituted) {
        return '';
      }

      originalSubstituted = true;
      return _original;
    };
  }

  // Resolves {{group}}: explicit override, comma-joined member names in a group
  // chat, or the character name otherwise.
  const getGroupValue = () => {
    if (typeof _group === 'string') {
      return _group;
    }

    if (selected_group) {
      const members = groups.find(x => x.id === selected_group)?.members;
      const names = Array.isArray(members)
        ? members.map(m => characters.find(c => c.avatar === m)?.name).filter(Boolean).join(', ')
        : '';
      return names;
    } else {
      return _name2 ?? name2;
    }
  };

  if (_replaceCharacterCard) {
    const fields = getCharacterCardFields();
    environment.charPrompt = fields.system || '';
    environment.charJailbreak = fields.jailbreak || '';
    environment.description = fields.description || '';
    environment.personality = fields.personality || '';
    environment.scenario = fields.scenario || '';
    environment.persona = fields.persona || '';
    environment.mesExamples = fields.mesExamples || '';
    environment.charVersion = fields.version || '';
    environment.char_version = fields.version || '';
  }

  // Must be substituted last so that they're replaced inside {{description}}
  environment.user = _name1 ?? name1;
  environment.char = _name2 ?? name2;
  environment.group = environment.charIfNotGroup = getGroupValue();
  environment.model = getGeneratingModel();

  return evaluateMacros(content, environment);
}


/**
 * Checks whether generation must be blocked for the Horde backend.
 * The GUI settings preset has no Horde equivalent, so it cannot be used there.
 * @returns {boolean} True when generation is not allowed (an error toast is shown).
 */
function isHordeGenerationNotAllowed() {
  const guiPresetOnHorde = main_api == 'koboldhorde' && preset_settings == 'gui';
  if (!guiPresetOnHorde) {
    return false;
  }

  toastr.error('GUI Settings preset is not supported for Horde. Please select another preset.');
  return true;
}

/**
 * Unblocks the UI after a generation is complete.
 * @param {string} [type] Generation type (optional)
 */
function unblockGeneration(type) {
  // A parallel quiet stream still in flight keeps the UI blocked
  const quietStreamRunning = type === 'quiet' && streamingProcessor && !streamingProcessor.isFinished;
  if (quietStreamRunning) {
    return;
  }

  is_send_press = false;
  // activateSendButtons();
  // showSwipeButtons();
  // setGenerationProgress(0);
  // flushEphemeralStoppingStrings();
  // flushWIDepthInjections();
}
/**
 * Computes the message and prompt bias strings for the next generation.
 * @param {string} textareaText Current user input text.
 * @param {string} type Generation type (e.g. 'impersonate', 'continue', 'swipe').
 * @returns {{messageBias: string, promptBias: string, isUserPromptBias: boolean}}
 */
export function getBiasStrings(textareaText, type) {
  // Impersonation and continuation never apply biases
  if (type == 'impersonate' || type == 'continue') {
    return { messageBias: '', promptBias: '', isUserPromptBias: false };
  }

  let messageBias = extractMessageBias(textareaText);
  let promptBias = '';

  // If user input is not provided, retrieve the bias of the most recent relevant message
  if (!textareaText) {
    const lastIndex = chat.length - 1;
    for (let i = lastIndex; i >= 0; i--) {
      // A swipe regenerates the last message, so its own bias doesn't count
      if (type === 'swipe' && i === lastIndex) {
        continue;
      }
      const mes = chat[i];
      const isRelevant = mes && (mes.is_user || mes.is_system || mes.extra?.type === system_message_types.NARRATOR);
      if (isRelevant) {
        if (mes.extra?.bias?.trim()?.length > 0) {
          promptBias = mes.extra.bias;
        }
        break;
      }
    }
  }

  // Precedence: explicit message bias > chat-derived bias > user-configured bias
  promptBias = messageBias || promptBias || power_user.user_prompt_bias || '';
  const isUserPromptBias = promptBias === power_user.user_prompt_bias;

  // Substitute params for everything
  messageBias = substituteParams(messageBias);
  promptBias = substituteParams(promptBias);
  return { messageBias, promptBias, isUserPromptBias };
}

/**
 * Executes slash commands and returns whether the generation was interrupted.
 * @param {string} message Text to be sent
 * @returns {Promise<boolean>} Whether the message sending was interrupted
 */
export async function processCommands(message) {
  const trimmed = message?.trim();
  if (!trimmed || !trimmed.startsWith('/')) {
    return false;
  }
  // await executeSlashCommandsOnChatInput(message, {
  //     clearChatInput: true,
  // });
  return true;
}

/**
 * Pushes a predefined system message into the chat and renders it.
 * @param {string} type System message type key from system_messages.
 * @param {string} [text] Optional text to override the predefined message body.
 * @param {object} [extra] Extra fields merged into the message's `extra` object.
 */
export function sendSystemMessage(type, text, extra = {}) {
  const systemMessage = system_messages[type];

  // Unknown message types are silently ignored
  if (!systemMessage) {
    return;
  }

  const newMessage = { ...systemMessage, send_date: getMessageTimeStamp() };

  if (text) {
    newMessage.mes = text;
  }

  if (type == system_message_types.SLASH_COMMANDS) {
    newMessage.mes = getSlashCommandsHelp();
  }

  if (!newMessage.extra) {
    newMessage.extra = {};
  }

  newMessage.extra = Object.assign(newMessage.extra, extra);
  newMessage.extra.type = type;

  chat.push(newMessage);
  addOneMessage(newMessage);
  is_send_press = false;
  if (type == system_message_types.SLASH_COMMANDS) {
    const browser = new SlashCommandBrowser();
    const spinner = document.querySelector('#chat .last_mes .custom-slashHelp');
    // Guard against a missing spinner (template changed or render failed):
    // dereferencing a null querySelector result would throw here.
    if (spinner) {
      const parent = spinner.parentElement;
      spinner.remove();
      browser.renderInto(parent);
      browser.search.focus();
    }
  }
}


/**
 * Removes all char A/N prompt injections from the chat.
 * To clean up when switching from groups to solo and vice versa.
 */
export function removeDepthPrompts() {
  const depthKeys = Object.keys(extension_prompts).filter((key) => key.startsWith('DEPTH_PROMPT'));
  for (const key of depthKeys) {
    delete extension_prompts[key];
  }
}

/**
 * Extracts the contents of bias macros from a message.
 * Uses an isolated Handlebars environment so only the {{bias}} helper is honored.
 * @param {string} message Message text
 * @returns {string} Message bias extracted from the message (or an empty string if not found)
 */
export function extractMessageBias(message) {
  if (!message) {
    return '';
  }

  try {
    const collected = [];
    const sandbox = Handlebars.create();
    // Each {{bias "..."}} occurrence records its text and renders to nothing.
    sandbox.registerHelper('bias', (text) => {
      collected.push(text);
      return '';
    });
    sandbox.compile(message)({});

    return collected.length > 0 ? ` ${collected.join(' ')}` : '';
  } catch {
    // Malformed templates yield no bias rather than an error.
    return '';
  }
}
/**
 * Builds the URL used to fetch a server-generated thumbnail.
 * @param {string} type Thumbnail type (e.g. avatar, background).
 * @param {string} file File name the thumbnail is generated from.
 * @returns {string} Relative URL of the thumbnail endpoint.
 */
export function getThumbnailUrl(type, file) {
  // Encode BOTH query values — previously only `file` was encoded, so a type
  // containing '&', '=' or spaces would corrupt the query string.
  return `/thumbnail?type=${encodeURIComponent(type)}&file=${encodeURIComponent(file)}`;
}

/**
 * Resolves the display name of the model for the active backend API.
 * @param {object} mes Unused; kept for signature compatibility with callers.
 * @returns {string} Model name, or an empty string for unknown APIs.
 */
function getGeneratingModel(mes) {
  // Guard chain instead of switch; each branch is evaluated lazily,
  // so only the active API's globals are touched.
  if (main_api === 'kobold' || main_api === 'textgenerationwebui') {
    return online_status;
  }
  if (main_api === 'novel') {
    return nai_settings.model_novel;
  }
  if (main_api === 'openai') {
    return getChatCompletionModel();
  }
  if (main_api === 'koboldhorde') {
    return kobold_horde_model;
  }
  return '';
}

/**
 * Gets the maximum usable context size for the current API.
 * @param {number|null} overrideResponseLength Optional override for the response length.
 * @returns {number} Maximum usable context size.
 */
export function getMaxContextSize(overrideResponseLength = null) {
  // Normalize invalid overrides (non-number, non-positive, NaN) to "no override".
  if (typeof overrideResponseLength !== 'number' || overrideResponseLength <= 0 || isNaN(overrideResponseLength)) {
    overrideResponseLength = null;
  }

  // Fallback when no API branch below matches — TODO confirm why 1487 specifically.
  let this_max_context = 1487;
  if (main_api == 'kobold' || main_api == 'koboldhorde' || main_api == 'textgenerationwebui') {
    // Reserve room for the response tokens out of the total context window.
    this_max_context = (max_context - (overrideResponseLength || amount_gen));
  }
  if (main_api == 'novel') {
    this_max_context = Number(max_context);
    // Clio and Kayra models are hard-capped at 8192 tokens of context.
    if (nai_settings.model_novel.includes('clio')) {
      this_max_context = Math.min(max_context, 8192);
    }
    if (nai_settings.model_novel.includes('kayra')) {
      this_max_context = Math.min(max_context, 8192);

      // Kayra may be further limited by the user's NovelAI subscription tier.
      const subscriptionLimit = getKayraMaxContextTokens();
      if (typeof subscriptionLimit === 'number' && this_max_context > subscriptionLimit) {
        this_max_context = subscriptionLimit;
        console.log(`NovelAI subscription limit reached. Max context size is now ${this_max_context}`);
      }
    }

    this_max_context = this_max_context - (overrideResponseLength || amount_gen);
  }
  if (main_api == 'openai') {
    this_max_context = oai_settings.openai_max_context - (overrideResponseLength || oai_settings.openai_max_tokens);
  }
  return this_max_context;
}

/**
 * Sets a prompt injection to insert custom text into any outgoing prompt. For use in UI extensions.
 * @param {string} key Prompt injection id.
 * @param {string} value Prompt injection value.
 * @param {number} position Insertion position. 0 is after story string, 1 is in-chat with custom depth.
 * @param {number} depth Insertion depth. 0 represents the last message in context. Expected values up to MAX_INJECTION_DEPTH.
 * @param {boolean} [scan=false] Should the prompt be included in the world info scan.
 * @param {number} [role] Extension prompt role. Defaults to SYSTEM.
 */
export function setExtensionPrompt(key, value, position, depth, scan = false, role = extension_prompt_roles.SYSTEM) {
  extension_prompts[key] = {
    value: String(value),
    position: Number(position),
    depth: Number(depth),
    scan: !!scan,
    // `?? SYSTEM` also catches an explicit null/undefined passed by callers.
    role: Number(role ?? extension_prompt_roles.SYSTEM),
  };
}

/**
 * Gets an enum value of the extension prompt role by its name.
 * @param {string} roleName The name of the extension prompt role.
 * @returns {number} The role id of the extension prompt.
 */
export function getExtensionPromptRoleByName(roleName) {
  // Pass through values that are already valid role ids.
  if (typeof roleName === 'number' && Object.values(extension_prompt_roles).includes(roleName)) {
    return roleName;
  }

  const namedRoles = {
    'system': extension_prompt_roles.SYSTEM,
    'user': extension_prompt_roles.USER,
    'assistant': extension_prompt_roles.ASSISTANT,
  };

  // Anything unrecognized falls back to SYSTEM.
  return Object.hasOwn(namedRoles, roleName) ? namedRoles[roleName] : extension_prompt_roles.SYSTEM;
}

/**
 * Removes all {{macros}} from a string.
 * @param {string} str String to remove macros from.
 * @returns {string} String with macros removed and surrounding whitespace trimmed.
 */
export function removeMacros(str) {
  const input = str ?? '';
  // Non-greedy match so adjacent macros are removed individually; [\s\S] spans newlines.
  const stripped = input.replace(/\{\{[\s\S]*?\}\}/gm, '');
  return stripped.trim();
}

// Reveals the generation stop button. Display is set to flex (not block)
// so the button's icon stays centered.
function showStopButton() {
  const stopButton = $('#mes_stop');
  stopButton.css('display', 'flex');
}

// Hides the send and continue buttons while a generation is in flight,
// showing the stop button in their place.
export function deactivateSendButtons() {
  for (const selector of ['#send_but', '#mes_continue']) {
    $(selector).addClass('displayNone');
  }
  showStopButton();
}
/**
 * Shows the left/right swipe arrows and the swipe counter on the last chat message,
 * when swiping is applicable (AI message, swipes enabled, not mid-group-generation).
 * Mutates the last chat entry to backfill swipe_id/swipes when they are missing.
 */
export function showSwipeButtons() {
  if (chat.length === 0) {
      return;
  }

  // No swipes on system/user messages, image messages, when swipes are disabled,
  // or while a group generation is running.
  if (
      chat[chat.length - 1].is_system ||
      !swipes ||
      Number($('.mes:last').attr('mesid')) < 0 ||
      chat[chat.length - 1].is_user ||
      chat[chat.length - 1].extra?.image ||
      (selected_group && is_group_generating)
  ) { return; }

  // swipe_id should be set if alternate greetings are added
  if (chat.length == 1 && chat[0].swipe_id === undefined) {
      return;
  }

  //had to add this to make the swipe counter work
  //(copied from the onclick functions for swipe buttons..
  //don't know why the array isn't set for non-swipe messsages in Generate or addOneMessage..)

  if (chat[chat.length - 1]['swipe_id'] === undefined) {              // if there is no swipe-message in the last spot of the chat array
      chat[chat.length - 1]['swipe_id'] = 0;                        // set it to id 0
      chat[chat.length - 1]['swipes'] = [];                         // empty the array
      chat[chat.length - 1]['swipes'][0] = chat[chat.length - 1]['mes'];  //assign swipe array with last message from chat
  }

  const currentMessage = $('#chat').children().filter(`[mesid="${chat.length - 1}"]`);
  const swipeId = chat[chat.length - 1].swipe_id;
  // Counter is 1-based for display: "current/total".
  var swipesCounterHTML = (`${(swipeId + 1)}/${(chat[chat.length - 1].swipes.length)}`);

  // Left arrow only when there is something to swipe back to.
  if (swipeId !== undefined && (chat[chat.length - 1].swipes.length > 1 || swipeId > 0)) {
      currentMessage.children('.swipe_left').css('display', 'flex');
  }
  //only show right when generate is off, or when next right swipe would not make a generate happen
  if (is_send_press === false || chat[chat.length - 1].swipes.length >= swipeId) {
      currentMessage.children('.swipe_right').css('display', 'flex');
      currentMessage.children('.swipe_right').css('opacity', '0.3');
  }
  //console.log((chat[chat.length - 1]));
  // Highlight the right arrow when the next swipe is the last existing one.
  if ((chat[chat.length - 1].swipes.length - swipeId) === 1) {
      //console.log('highlighting R swipe');
      currentMessage.children('.swipe_right').css('opacity', '0.7');
  }
  //console.log(swipesCounterHTML);

  $('.swipes-counter').html(swipesCounterHTML);

  //console.log(swipeId);
  //console.log(chat[chat.length - 1].swipes.length);
}
/**
 * Scrolls the chat panel to the newest message when the user setting allows it.
 * NOTE: the actual scrolling implementation is currently disabled (commented out),
 * so this is a no-op apart from reading the setting.
 */
export function scrollChatToBottom() {
  if (!power_user.auto_scroll_chat_to_bottom) {
      return;
  }
  // let position = chatElement[0].scrollHeight;

  // if (power_user.waifuMode) {
  //     const lastMessage = chatElement.find('.mes').last();
  //     if (lastMessage.length) {
  //         const lastMessagePosition = lastMessage.position().top;
  //         position = chatElement.scrollTop() + lastMessagePosition;
  //     }
  // }

  // chatElement.scrollTop(position);
}
/**
 * Wrapper to fetch an extension prompt's value by module name, with macros substituted.
 * @param {string} moduleName Extension prompt key.
 * @returns {string|undefined} Substituted prompt value, or undefined when no name is given.
 */
export function getExtensionPromptByName(moduleName) {
  if (!moduleName) {
      return;
  }
  return substituteParams(extension_prompts[moduleName]?.value);
}
/**
 * Returns the extension prompt for the given position, depth, and role.
 * If multiple prompts are found, they are joined with a separator.
 * @param {number} [position] Position of the prompt
 * @param {number} [depth] Depth of the prompt
 * @param {string} [separator] Separator for joining multiple prompts
 * @param {number} [role] Role of the prompt
 * @param {boolean} [wrap] Wrap start and end with a separator
 * @returns {string} Extension prompt
 */
export function getExtensionPrompt(position = extension_prompt_types.IN_PROMPT, depth = undefined, separator = '\n', role = undefined, wrap = true) {
  const values = [];
  // Iterate keys in sorted order so output is deterministic regardless of insertion order.
  for (const key of Object.keys(extension_prompts).sort()) {
      const prompt = extension_prompts[key];
      if (prompt.position != position || !prompt.value) {
          continue;
      }
      // A prompt without a depth/role matches any requested depth/role.
      if (depth !== undefined && prompt.depth !== undefined && prompt.depth !== depth) {
          continue;
      }
      if (role !== undefined && prompt.role !== undefined && prompt.role !== role) {
          continue;
      }
      values.push(prompt.value.trim());
  }

  let result = values.join(separator);
  // Optionally pad both ends with the separator (only when non-empty and not already padded).
  if (wrap && result.length && !result.startsWith(separator)) {
      result = separator + result;
  }
  if (wrap && result.length && !result.endsWith(separator)) {
      result = result + separator;
  }
  if (result.length) {
      result = substituteParams(result);
  }
  return result;
}

/**
 * Injects extension prompts into chat messages.
 * @param {object[]} messages Array of chat messages (mutated in place)
 * @param {boolean} isContinue Whether the generation is a continuation. If true, the extension prompts of depth 0 are injected at position 1.
 * @returns {number[]} Array of indices where the extension prompts were injected
 */
function doChatInject(messages, isContinue) {
  const injectedIndices = [];
  let totalInsertedMessages = 0;
  // Work on the reversed array so "depth" counts from the end of the chat;
  // reversed back before returning.
  messages.reverse();

  for (let i = 0; i <= MAX_INJECTION_DEPTH; i++) {
      // Order of priority (most important go lower)
      const roles = [extension_prompt_roles.SYSTEM, extension_prompt_roles.USER, extension_prompt_roles.ASSISTANT];
      const names = {
          [extension_prompt_roles.SYSTEM]: '',
          [extension_prompt_roles.USER]: name1,
          [extension_prompt_roles.ASSISTANT]: name2,
      };
      const roleMessages = [];
      const separator = '\n';
      const wrap = false;

      for (const role of roles) {
          // Collect all in-chat prompts registered at this depth for this role.
          const extensionPrompt = String(getExtensionPrompt(extension_prompt_types.IN_CHAT, i, separator, role, wrap)).trimStart();
          const isNarrator = role === extension_prompt_roles.SYSTEM;
          const isUser = role === extension_prompt_roles.USER;
          const name = names[role];

          if (extensionPrompt) {
              roleMessages.push({
                  name: name,
                  is_user: isUser,
                  mes: extensionPrompt,
                  extra: {
                      // SYSTEM-role prompts are rendered as narrator messages.
                      type: isNarrator ? system_message_types.NARRATOR : null,
                  },
              });
          }
      }

      if (roleMessages.length) {
          // On continues, depth-0 prompts shift to index 1 so the continued message stays last.
          const depth = isContinue && i === 0 ? 1 : i;
          // Offset by messages already inserted at shallower depths.
          const injectIdx = depth + totalInsertedMessages;
          messages.splice(injectIdx, 0, ...roleMessages);
          totalInsertedMessages += roleMessages.length;
          // NOTE: callback parameter `i` shadows the loop counter here (indices, not depth).
          injectedIndices.push(...Array.from({ length: roleMessages.length }, (_, i) => injectIdx + i));
      }
  }

  messages.reverse();
  return injectedIndices;
}

// Prevents custom-depth WI entries (which have unique random key names) from
// duplicating by deleting every previously registered 'customDepthWI*' injection.
function flushWIDepthInjections() {
  Object.keys(extension_prompts)
      .filter((key) => key.startsWith('customDepthWI'))
      .forEach((key) => delete extension_prompts[key]);
}

/**
 * Highlights the last chat message that still fits into the generation context
 * by adding the 'lastInContext' class to its element.
 * @param {number} lastmsg Number of chat messages included in the prompt.
 * @param {string} type Generation type; swipe/regenerate/continue resend the last message, so one extra is counted.
 */
function setInContextMessages(lastmsg, type) {
  $('#chat .mes').removeClass('lastInContext');

  if (type === 'swipe' || type === 'regenerate' || type === 'continue') {
      lastmsg++;
  }

  const lastMessageBlock = $('#chat .mes:not([is_system="true"])').eq(-lastmsg);
  lastMessageBlock.addClass('lastInContext');

  if (lastMessageBlock.length === 0) {
      const firstMessageId = getFirstDisplayedMessageId();
      // BUG FIX: the attribute selector was missing its closing ']', which makes
      // jQuery throw a syntax error on this fallback path.
      $(`#chat .mes[mesid="${firstMessageId}"]`).addClass('lastInContext');
  }
}

/**
 * Finds the smallest numeric 'mesid' among the currently rendered chat messages.
 * @returns {number} Minimum message id (Infinity when no messages are rendered,
 * per Math.min on an empty list).
 */
export function getFirstDisplayedMessageId() {
  const ids = [];
  for (const el of document.querySelectorAll('#chat .mes')) {
    const id = Number(el.getAttribute('mesid'));
    if (!Number.isNaN(id)) {
      ids.push(id);
    }
  }
  return Math.min(...ids);
}

/**
 * Builds the chat's opening message from the current character's first_mes,
 * attaching alternate greetings as swipes when present.
 * @returns {object} Message object ready to be pushed into the chat array.
 */
function getFirstMessage() {
  const firstMes = characters[this_chid].first_mes || '';
  const alternateGreetings = characters[this_chid]?.data?.alternate_greetings;

  const message = {
      name: name2,
      is_user: false,
      is_system: false,
      send_date: getMessageTimeStamp(),
      // Greeting text passes through the AI-output regex scripts, like any bot message.
      mes: getRegexedString(firstMes, regex_placement.AI_OUTPUT),
      extra: {},
  };

  if (Array.isArray(alternateGreetings) && alternateGreetings.length > 0) {
      const swipes = [message.mes, ...(alternateGreetings.map(greeting => getRegexedString(greeting, regex_placement.AI_OUTPUT)))];

      // If the primary greeting is empty, promote the first alternate greeting
      // to be the visible message.
      if (!message.mes) {
          swipes.shift();
          message.mes = swipes[0];
      }

      message['swipe_id'] = 0;
      message['swipes'] = swipes;
      message['swipe_info'] = [];
  }

  return message;
}
/**
 * Clears the rendered chat and associated editor/prompt state.
 * NOTE(review): the implementation is currently stubbed out — every step below
 * is commented, so calling this is a no-op until the logic is restored.
 */
export async function clearChat() {
  // closeMessageEditor();
  // extension_prompts = {};
  // if (is_delete_mode) {
  //     $('#dialogue_del_mes_cancel').trigger('click');
  // }
  // $('#chat').children().remove();
  // if ($('.zoomed_avatar[forChar]').length) {
  //     console.debug('saw avatars to remove');
  //     $('.zoomed_avatar[forChar]').remove();
  // } else { console.debug('saw no avatars'); }

  // await saveItemizedPrompts(getCurrentChatId());
  // itemizedPrompts = [];
}
/**
 * Opens a saved chat file for the current character.
 * NOTE(review): the implementation is currently stubbed out — every step below
 * is commented, so calling this is a no-op until the logic is restored.
 * @param {string} file_name Chat file name to open.
 */
export async function openCharacterChat(file_name) {
  // await clearChat();
  // characters[this_chid]['chat'] = file_name;
  // chat.length = 0;
  // chat_metadata = {};
  // await getChat();
  // $('#selected_chat_pole').val(file_name);
  // await createOrEditCharacter();
}

////////// OPTIMZED MAIN API CHANGE FUNCTION ////////////

/**
 * Switches the UI (settings panel, connector, presets, range sliders) to the API
 * selected in the #main_api dropdown, then updates global API state.
 * Side effects: sets `main_api`, resets `online_status` to 'no_connection',
 * revalidates samplers, rebuilds the prompt manager, and retokenizes the character editor.
 */
export function changeMainAPI() {
  const selectedVal = $('#main_api').val();
  // Per-API groups of UI elements toggled below.
  const apiElements = {
      'koboldhorde': {
          apiSettings: $('#kobold_api-settings'),
          apiConnector: $('#kobold_horde'),
          apiPresets: $('#kobold_api-presets'),
          apiRanges: $('#range_block'),
          maxContextElem: $('#max_context_block'),
          amountGenElem: $('#amount_gen_block'),
      },
      'kobold': {
          apiSettings: $('#kobold_api-settings'),
          apiConnector: $('#kobold_api'),
          apiPresets: $('#kobold_api-presets'),
          apiRanges: $('#range_block'),
          maxContextElem: $('#max_context_block'),
          amountGenElem: $('#amount_gen_block'),
      },
      'textgenerationwebui': {
          apiSettings: $('#textgenerationwebui_api-settings'),
          apiConnector: $('#textgenerationwebui_api'),
          apiPresets: $('#textgenerationwebui_api-presets'),
          apiRanges: $('#range_block_textgenerationwebui'),
          maxContextElem: $('#max_context_block'),
          amountGenElem: $('#amount_gen_block'),
      },
      'novel': {
          apiSettings: $('#novel_api-settings'),
          apiConnector: $('#novel_api'),
          apiPresets: $('#novel_api-presets'),
          apiRanges: $('#range_block_novel'),
          maxContextElem: $('#max_context_block'),
          amountGenElem: $('#amount_gen_block'),
      },
      'openai': {
          apiSettings: $('#openai_settings'),
          apiConnector: $('#openai_api'),
          apiPresets: $('#openai_api-presets'),
          apiRanges: $('#range_block_openai'),
          maxContextElem: $('#max_context_block'),
          amountGenElem: $('#amount_gen_block'),
      },
  };
  // (Removed leftover console.log debug dump of apiElements.)
  //first, disable everything so the old elements stop showing
  for (const apiName in apiElements) {
      const apiObj = apiElements[apiName];
      //do not hide items to then proceed to immediately show them.
      if (selectedVal === apiName) {
          continue;
      }
      apiObj.apiSettings.css('display', 'none');
      apiObj.apiConnector.css('display', 'none');
      apiObj.apiRanges.css('display', 'none');
      apiObj.apiPresets.css('display', 'none');
  }

  //then, find and enable the active item.
  //This is split out of the loop so that different apis can share settings divs
  let activeItem = apiElements[selectedVal];

  activeItem.apiSettings.css('display', 'block');
  activeItem.apiConnector.css('display', 'block');
  activeItem.apiRanges.css('display', 'block');
  activeItem.apiPresets.css('display', 'block');

  if (selectedVal === 'openai') {
      activeItem.apiPresets.css('display', 'flex');
  }

  if (selectedVal === 'textgenerationwebui' || selectedVal === 'novel') {
      console.log('enabling amount_gen for ooba/novel');
      activeItem.amountGenElem.find('input').prop('disabled', false);
      activeItem.amountGenElem.css('opacity', 1.0);
  }

  //custom because streaming has been moved up under response tokens, which exists inside common settings block
  if (selectedVal === 'textgenerationwebui') {
      $('#streaming_textgenerationwebui_block').css('display', 'block');
  } else {
      $('#streaming_textgenerationwebui_block').css('display', 'none');
  }
  if (selectedVal === 'kobold') {
      $('#streaming_kobold_block').css('display', 'block');
  } else {
      $('#streaming_kobold_block').css('display', 'none');
  }

  if (selectedVal === 'novel') {
      $('#ai_module_block_novel').css('display', 'block');
  } else {
      $('#ai_module_block_novel').css('display', 'none');
  }

  // Hide common settings for OpenAI
  console.debug('value?', selectedVal);
  if (selectedVal == 'openai') {
      console.debug('hiding settings?');
      $('#common-gen-settings-block').css('display', 'none');
  } else {
      $('#common-gen-settings-block').css('display', 'block');
  }

  main_api = selectedVal;
  online_status = 'no_connection';

  // WindowAI needs an immediate reconnect attempt after switching.
  if (main_api == 'openai' && oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
      $('#api_button_openai').trigger('click');
  }

  // if (main_api == 'koboldhorde') {
  //     getStatusHorde();
  //     getHordeModels(true);
  // }
  validateDisabledSamplers();
  setupChatCompletionPromptManager(oai_settings);
  forceCharacterEditorTokenize();
}
/**
 * Summarizes OpenAI prompt token counts into an itemized entry pushed onto thisPromptBits.
 * @param {object} counts Token counts keyed by prompt section.
 * @param {object[]} thisPromptBits Accumulator array the itemized entry is appended to.
 */
function parseTokenCounts(counts, thisPromptBits) {
  /**
   * Sums all arguments that coerce to valid numbers, ignoring NaN entries.
   * @param {any[]} numbers
   * @returns {number}
   */
  function getSum(...numbers) {
      return numbers.map(x => Number(x)).filter(x => !Number.isNaN(x)).reduce((acc, val) => acc + val, 0);
  }
  // BUG FIX: the values must be spread into getSum's rest parameter. Passing the
  // array as a single argument made Number(array) yield NaN for multi-key objects,
  // collapsing the total to 0.
  const total = getSum(...Object.values(counts));

  thisPromptBits.push({
      // `a + b` with a missing operand is NaN, which `|| 0` normalizes.
      oaiStartTokens: (counts?.start + counts?.controlPrompts) || 0,
      oaiPromptTokens: getSum(counts?.prompt, counts?.charDescription, counts?.charPersonality, counts?.scenario) || 0,
      oaiBiasTokens: counts?.bias || 0,
      oaiNudgeTokens: counts?.nudge || 0,
      oaiJailbreakTokens: counts?.jailbreak || 0,
      oaiImpersonateTokens: counts?.impersonate || 0,
      oaiExamplesTokens: (counts?.dialogueExamples + counts?.examples) || 0,
      oaiConversationTokens: (counts?.conversation + counts?.chatHistory) || 0,
      oaiNsfwTokens: counts?.nsfw || 0,
      oaiMainTokens: counts?.main || 0,
      oaiTotalTokens: total,
  });
}

/**
 * Computes the chat index the next message will occupy.
 * Swipes replace the last message rather than appending a new one.
 * @param {string} type Generation type.
 * @returns {number} Index for the next message.
 */
export function getNextMessageId(type) {
  if (type == 'swipe') {
    return chat.length - 1;
  }
  return chat.length;
}

/**
 * Concatenates every registered extension prompt value into one newline-joined string.
 * @returns {string} Substituted prompt text, or an empty string when nothing is registered.
 */
function getAllExtensionPrompts() {
  const parts = [];
  for (const prompt of Object.values(extension_prompts)) {
      if (prompt.value) {
          parts.push(prompt.value.trim());
      }
  }
  const joined = parts.join('\n');
  return joined.length ? substituteParams(joined) : '';
}
/**
 * Whether token streaming is enabled for the currently selected API and settings.
 * For chat completions, Scale and AI21 sources never stream, and MakerSuite
 * 'bison' models are excluded as well.
 * @returns {boolean} True if the active backend can and should stream tokens.
 */
export function isStreamingEnabled() {
  const noStreamSources = [chat_completion_sources.SCALE, chat_completion_sources.AI21];
  return ((main_api == 'openai' && oai_settings.stream_openai && !noStreamSources.includes(oai_settings.chat_completion_source) && !(oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE && oai_settings.google_model.includes('bison')))
      || (main_api == 'kobold' && kai_settings.streaming_kobold && kai_flags.can_use_streaming)
      || (main_api == 'novel' && nai_settings.streaming_novel)
      || (main_api == 'textgenerationwebui' && textgen_settings.streaming));
}
/**
 * Parses the example messages into individual messages.
 * @param {string} messageExampleString - The string containing the example messages
 * @param {boolean} appendNamesForGroup - Whether to append the character name for group chats
 * @returns {Message[]} Array of message objects
 */
export function parseExampleIntoIndividual(messageExampleString, appendNamesForGroup = true) {
    let result = []; // array of msgs
    let tmp = messageExampleString.split('\n');
    let cur_msg_lines = [];
    let in_user = false;
    let in_bot = false;
    // Helper: flushes the accumulated lines into one message object and resets the buffer.
    function add_msg(name, role, system_name) {
        // join different newlines (we split them by \n and join by \n)
        // remove char name
        // strip to remove extra spaces
        // NOTE(review): replace() removes the FIRST 'name:' occurrence anywhere in the
        // text, not only a leading prefix — confirm this is acceptable for bodies that
        // quote the speaker's name.
        let parsed_msg = cur_msg_lines.join('\n').replace(name + ':', '').trim();

        // Group chats keep the speaker name so the model can tell characters apart.
        if (appendNamesForGroup && selected_group && ['example_user', 'example_assistant'].includes(system_name)) {
            parsed_msg = `${name}: ${parsed_msg}`;
        }

        result.push({ 'role': role, 'content': parsed_msg, 'name': system_name });
        cur_msg_lines = [];
    }
    // skip first line as it'll always be "This is how {bot name} should talk"
    for (let i = 1; i < tmp.length; i++) {
        let cur_str = tmp[i];
        // if it's the user message, switch into user mode and out of bot mode
        // (the two branches mirror each other with the roles swapped)
        if (cur_str.startsWith(name1 + ':')) {
            in_user = true;
            // we were in the bot mode previously, add the message
            if (in_bot) {
                add_msg(name2, 'system', 'example_assistant');
            }
            in_bot = false;
        } else if (cur_str.startsWith(name2 + ':')) {
            in_bot = true;
            // we were in the user mode previously, add the message
            if (in_user) {
                add_msg(name1, 'system', 'example_user');
            }
            in_user = false;
        }
        // push the current line into the current message array only after checking for presence of user/bot
        cur_msg_lines.push(cur_str);
    }
    // Special case for last message in a block because we don't have a new message to trigger the switch
    if (in_user) {
        add_msg(name1, 'system', 'example_user');
    } else if (in_bot) {
        add_msg(name2, 'system', 'example_assistant');
    }
    return result;
}

/**
 * Sends a non-streaming request to the API.
 * @param {string} type Generation type
 * @param {object} data Generation data
 * @returns {Promise<object>} Response data from the API
 * @throws The parsed JSON error body when the backend responds with a non-OK status.
 */
async function sendGenerationRequest(type, data) {
  // OpenAI and Horde have their own request pipelines; both honor the shared abort signal.
  if (main_api === 'openai') {
      return await sendOpenAIRequest(type, data.prompt, abortController.signal);
  }

  if (main_api === 'koboldhorde') {
      return await generateHorde(data.prompt, data, abortController.signal, true);
  }

  // All other backends go through the generic generate endpoint.
  const response = await fetch(getGenerateUrl(main_api), {
      method: 'POST',
      headers: getRequestHeaders(),
      cache: 'no-cache',
      body: JSON.stringify(data),
      signal: abortController.signal,
  });

  if (!response.ok) {
      // Surface the server's error payload to the caller.
      const error = await response.json();
      throw error;
  }

  const responseData = await response.json();
  return responseData;
}
