From 77dcff11cdaed4d1eb4d36072c4f5d1660747763 Mon Sep 17 00:00:00 2001
From: xModo99 <xmodo999@gmail.com>
Date: Wed, 10 Jul 2024 02:08:29 +0000
Subject: [PATCH] [AI] Separate Sampling and Scheduling

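Recent Automatic1111 WebUI builds treat the noise scheduler as a
setting separate from the sampler and expose the available schedulers
through their own API endpoint. Mirror that split in-game:

- add an `aiSchedulingMethod` variable (default 'karras') to the
  default game state and the default game options;
- add `getSchedulerList()` to the Stable Diffusion client, querying
  `/sdapi/v1/schedulers` and returning the scheduler names, plus a
  `getSysInfo()` helper for `/internal/sysinfo`;
- add a "Scheduling Method" text box to the art options, validated
  against the live scheduler list the same way the existing
  "Sampling Method" option is;
- move the AI art options out of `App.UI.artOptions()` into a new
  `App.UI.aiArtOptions()` and group them under "AI Model",
  "Prompt Details", "Behavior", and "Advanced Config" headings.

A minimal usage sketch of the new client helper (the function name is
illustrative, the scheduler names in the comments are examples of what
a current WebUI reports, and only the `name` field of each returned
object is read):

    async function checkScheduler() {
        // GET `${V.aiApiUrl}/sdapi/v1/schedulers`, mapped to each entry's `name`.
        const schedulers = await App.Art.GenAI.sdClient.getSchedulerList();
        // e.g. ["automatic", "uniform", "karras", "exponential", ...],
        // or [] if the endpoint is missing or the request failed.
        if (!schedulers.includes(V.aiSchedulingMethod)) {
            console.log(`Scheduler "${V.aiSchedulingMethod}" is not offered by this backend.`);
        }
    }

The options page performs the same check and flags an unknown value
next to the text box.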
---
 js/003-data/gameVariableData.js  |   2 +
 src/art/genAI/stableDiffusion.js |  61 +++-
 src/gui/options/options.js       | 524 +++++++++++++++++--------------
 3 files changed, 330 insertions(+), 257 deletions(-)

diff --git a/js/003-data/gameVariableData.js b/js/003-data/gameVariableData.js
index 85f94bc7fd4..de2217ac1ec 100644
--- a/js/003-data/gameVariableData.js
+++ b/js/003-data/gameVariableData.js
@@ -239,6 +239,7 @@ App.Data.defaultGameStateVariables = {
 	aiSamplingSteps: 20,
 	aiSamplingStepsEvent: 20,
 	aiStyle: 1,
+	aiSchedulingMethod: 'karras',
 	aiRestoreFaces: false,
 	aiUpscale: false,
 	aiUpscaleScale: 1.75,
@@ -1660,6 +1661,7 @@ App.Data.defaultGameOptions = {
 	aiLoraPack: true,
 	aiDisabledLoRAs: [],
 	aiStyle: 1,
+	aiSchedulingMethod: 'karras',
 	aiCustomStylePos: "",
 	aiCustomStyleNeg: "",
 	aiNationality: 2,
diff --git a/src/art/genAI/stableDiffusion.js b/src/art/genAI/stableDiffusion.js
index 391ea39e8d8..95bf04e8d7e 100644
--- a/src/art/genAI/stableDiffusion.js
+++ b/src/art/genAI/stableDiffusion.js
@@ -82,7 +82,7 @@ App.Art.GenAI.StableDiffusionSettings = class {
 async function fetchWithTimeout(url, timeout, options) {
 	const controller = new AbortController();
 	const id = setTimeout(() => controller.abort(), timeout);
-	const response = await fetch(url, {signal: controller.signal, ...options});
+	const response = await fetch(url, { signal: controller.signal, ...options });
 	clearTimeout(id);
 	return response;
 }
@@ -411,8 +411,8 @@ App.Art.GenAI.StableDiffusionClient = class {
 	 * @param {string} [method="GET"]
 	 * @returns {Promise<Response>}
 	 */
-	async fetchAPIQuery(relativeUrl, method="GET") {
-		return fetchWithTimeout(`${V.aiApiUrl}${relativeUrl}`, 30000, {method: method});
+	async fetchAPIQuery(relativeUrl, method = "GET") {
+		return fetchWithTimeout(`${V.aiApiUrl}${relativeUrl}`, 30000, { method: method });
 	}
 
 	/**
@@ -449,6 +449,40 @@ App.Art.GenAI.StableDiffusionClient = class {
 			});
 	}
 
+
+	/**
+	 * @returns {Promise<string[]>}
+	 */
+	async getSchedulerList() {
+		return this.fetchAPIQuery(`/sdapi/v1/schedulers`)
+			.then((value) => {
+				return value.json();
+			})
+			.then((list) => {
+				return list.map(o => o.name);
+			})
+			.catch(err => {
+				console.log(`Failed to get scheduler list from Stable Diffusion.`);
+				return [];
+			});
+	}
+
+
+
+	/** Gets the sysinfo
+	 * @returns {Promise<{Version: string}>}
+	 */
+	async getSysInfo() {
+		return this.fetchAPIQuery(`/internal/sysinfo`)
+			.then((value) => {
+				return value.json();
+			})
+			.catch(err => {
+				console.log(`Failed to get sysinfo from Stable Diffusion.`);
+				return {};
+			});
+	}
+
 	/** Check to see whether a face restore model is configured.
 	 * @returns {Promise<boolean>}
 	 */
@@ -466,6 +500,7 @@ App.Art.GenAI.StableDiffusionClient = class {
 			});
 	}
 
+
 	/** Check to see if the ADetailer script is installed. Probably should check more than that, but this'll catch the dumb cases.
 	 * @returns {Promise<boolean>}
 	 */
@@ -588,7 +623,7 @@ App.Art.GenAI.StableDiffusionClient = class {
 		}
 		/** @type {string[]} */
 		let list = await this.fetchAPIQuery(`/sdapi/v1/loras`)
-		// cSpell:enable
+			// cSpell:enable
 			.then((value) => { return value.json(); })
 			.then((list) => {
 				let entries = [];
@@ -661,7 +696,7 @@ App.Art.GenAI.StaticCaching = class {
 		let settingsSlave = slave;
 		if (V.aiUseRAForEvents && isEventImage) {
 			settingsSlave = structuredClone(slave);
-			DefaultRules(settingsSlave, {aiPromptsOnly: true});
+			DefaultRules(settingsSlave, { aiPromptsOnly: true });
 		}
 		const settings = await App.Art.GenAI.sdClient.buildStableDiffusionSettings(settingsSlave, steps);
 		const body = JSON.stringify(settings);
@@ -725,7 +760,7 @@ App.Art.GenAI.StaticCaching = class {
 					}
 					// If new image, add or replace it in
 					if (imagePreexisting === -1) {
-						const imageId = await App.Art.GenAI.staticImageDB.putImage({data: imageData});
+						const imageId = await App.Art.GenAI.staticImageDB.putImage({ data: imageData });
 						if (replacementImageIndex !== null) {
 							await App.Art.GenAI.staticImageDB.removeImage(slave.custom.aiImageIds[replacementImageIndex]);
 							slave.custom.aiImageIds[replacementImageIndex] = imageId;
@@ -740,11 +775,11 @@ App.Art.GenAI.StaticCaching = class {
 					}
 				})().then(resolve).catch(reject);
 			}), {
-				/**
-				 * Do something when there's progress on generating an image
-				 * @param {(progress: number) => void} fn A function to call when there's progress
-				 * @returns {FC.PromiseWithProgress<void>}
-				 */
+			/**
+			 * Do something when there's progress on generating an image
+			 * @param {(progress: number) => void} fn A function to call when there's progress
+			 * @returns {FC.PromiseWithProgress<void>}
+			 */
 				onProgress(fn) {
 					progressFns.push(fn);
 					return result;
@@ -799,7 +834,7 @@ App.Art.GenAI.ReactiveCaching = class {
 		let settingsSlave = slave;
 		if (V.aiUseRAForEvents && isEventImage) {
 			settingsSlave = structuredClone(slave);
-			DefaultRules(settingsSlave, {aiPromptsOnly: true});
+			DefaultRules(settingsSlave, { aiPromptsOnly: true });
 		}
 		const settings = await App.Art.GenAI.sdClient.buildStableDiffusionSettings(settingsSlave, steps);
 		const body = JSON.stringify(settings);
@@ -856,7 +891,7 @@ App.Art.GenAI.ReactiveCaching = class {
 		}
 		// If new image, add or replace it in
 		if (imagePreexisting === -1) {
-			const imageId = await App.Art.GenAI.reactiveImageDB.putImage({data: imageData});
+			const imageId = await App.Art.GenAI.reactiveImageDB.putImage({ data: imageData });
 			if (replacementImageIndex !== null) {
 				await App.Art.GenAI.reactiveImageDB.removeImage(slave.custom.aiImageIds[replacementImageIndex]);
 				slave.custom.aiImageIds[replacementImageIndex] = imageId;
diff --git a/src/gui/options/options.js b/src/gui/options/options.js
index 6dc1f9bfb2b..d8914b7414d 100644
--- a/src/gui/options/options.js
+++ b/src/gui/options/options.js
@@ -1,6 +1,6 @@
 // cSpell:ignore SSAA
 
-App.UI.optionsPassage = function() {
+App.UI.optionsPassage = function () {
 	const el = new DocumentFragment();
 	App.UI.DOM.appendNewElement("h1", el, `Game Options`);
 	App.Utils.PassageSwitchHandler.set(App.EventHandlers.optionsChanged);
@@ -806,7 +806,7 @@ App.UI.optionsPassage = function() {
 			options.addOption("Maximum random slave events per week", "RIEPerWeek", V.eventControl)
 				.addValueList(V.eventControl.level > 4 ? [1, 2, 3, 4] : [1, 2, 3]);
 		}
-		
+
 		el.append(options.render());
 
 		App.UI.DOM.appendNewElement("div", el, "Importing options into an in-progress game risks breaking the game, but you can export options from this game and import them into a new game.", ["warning"]);
@@ -846,7 +846,7 @@ App.UI.optionsPassage = function() {
  * @param {boolean} isIntro
  * @returns {DocumentFragment}
  */
-App.Intro.display = function(isIntro) {
+App.Intro.display = function (isIntro) {
 	const el = new DocumentFragment();
 	let options;
 	let r;
@@ -1028,7 +1028,7 @@ App.Intro.display = function(isIntro) {
  * @param {boolean} isIntro
  * @returns {DocumentFragment}
  */
-App.Intro.contentAndFlavor = function(isIntro) {
+App.Intro.contentAndFlavor = function (isIntro) {
 	const el = new DocumentFragment();
 	let r;
 	let options;
@@ -1334,7 +1334,7 @@ App.UI.aiLoraList = () => {
 		}
 	};
 
-	const loraDiv = (lora, installed=false) => {
+	const loraDiv = (lora, installed = false) => {
 		const lDiv = App.UI.DOM.makeElement("div");
 		const links = [];
 		lora.urls.forEach((url) => {
@@ -1454,23 +1454,30 @@ App.UI.aiLoraList = () => {
 /**
  * @param {InstanceType<App.UI.OptionsGroup>} options
  */
-App.UI.aiPromptingOptions = function(options) {
+App.UI.aiPromptingOptions = function (options) {
+	options.addCustom("AI Model");
 	options.addOption("AI style prompting", "aiStyle")
 		.addValueList([
 			["Photorealistic", 1],
 			["Anime/Hentai", 2],
 			["Custom", 0]
 		]);
+
 	if (V.aiStyle === 0) {
-		options.addOption("AI custom style positive prompt", "aiCustomStylePos").showTextBox({large: true, forceString: true})
+		options.addOption("AI custom style positive prompt", "aiCustomStylePos").showTextBox({ large: true, forceString: true })
 			.addComment("Include desired LoRA triggers (<code>&lt;lora:LowRA:0.5&gt;</code>) and general style prompts relevant to your chosen model ('<code>hand drawn, dark theme, black background</code>'), but no slave-specific prompts");
-		options.addOption("AI custom style negative prompt", "aiCustomStyleNeg").showTextBox({large: true, forceString: true})
+		options.addOption("AI custom style negative prompt", "aiCustomStyleNeg").showTextBox({ large: true, forceString: true })
 			.addComment("Include undesired general style prompts relevant to your chosen model ('<code>greyscale, photography, forest, low camera angle</code>'), but no slave-specific prompts");
 	} else if (V.aiStyle === 1) {
 		options.addComment("For best results, use an appropriately-trained photorealistic base model, such as MajicMIX or Life Like Diffusion.");
 	} else if (V.aiStyle === 2) {
 		options.addComment("For best results, use an appropriately-trained hentai base model, such as Hassaku.");
 	}
+
+	options.addCustom("Prompt Details");
+	options.addOption("Visual age filter", 'aiAgeFilter')
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment(`Creating images of characters that <U>appear to be</U> minors may be questionable in some countries, especially if they are generated by AI. Realistic images are even riskier due to their easy confusion with real ones. This option attempts to generate SFW images for them. <span class="warning">You may want to check your local laws before disabling this option.</span>`);
 	options.addOption("LoRA models are", "aiLoraPack")
 		.addValue("Enabled", true).on().addValue("Disabled", false).off();
 	if (V.aiLoraPack) {
@@ -1482,6 +1489,270 @@ App.UI.aiPromptingOptions = function(options) {
 	options.addOption("Gender hints come from", "aiGenderHint")
 		.addValue("Hormone balance", 1).addValue("Perceived gender", 2).addValue("Pronouns", 3)
 		.addComment("How to determine whether to include words like \"woman\" or \"man\" in a prompt.");
+
+
+};
+
+/**
+ * @param {InstanceType<App.UI.OptionsGroup>} options
+ */
+App.UI.aiArtOptions = function(options) {
+	options.addComment("This is experimental. Please follow the setup instructions below.");
+	options.addCustom(App.UI.stableDiffusionInstallationGuide("Stable Diffusion Installation Guide"));
+	if (V.aiApiUrl.endsWith('/')) { // common error is including a trailing slash, which will fuck us up, so strip it automatically
+		V.aiApiUrl = V.aiApiUrl.slice(0, -1);
+	}
+	options.addOption("API URL", "aiApiUrl").showTextBox().addComment("The URL of the Automatic 1111 Stable Diffusion API.");
+
+	// Prompting
+	App.UI.aiPromptingOptions(options);
+
+	options.addCustom("Behavior");
+	options.addOption("Caching Strategy", 'aiCachingStrategy')
+		.addValue("Reactive", 'reactive').addValue("Static", 'static')
+		.addComment("Caching behavior for AI images. Reactive pictures always reflect the state of the slave at the current time. Static refreshes every set amount of weeks, or manually. Images will not be brought across different strategies, but if the model is the same the generated images will be the same as well.");
+
+	if (V.aiCachingStrategy === 'static') {
+		options.addOption("Automatic generation", "aiAutoGen")
+			.addValue("Enabled", true).on().addValue("Disabled", false).off()
+			.addComment("Generate images for new slaves on the fly. If disabled, you will need to manually click to generate each slave's image.");
+		if (V.aiAutoGen) {
+			if (V.aiAutoGenFrequency < 1) {
+				V.aiAutoGenFrequency = 1;
+			}
+			V.aiAutoGenFrequency = Math.round(V.aiAutoGenFrequency);
+			options.addOption("Regeneration Frequency", "aiAutoGenFrequency").showTextBox()
+				.addComment("How often (in weeks) to regenerate slave images. Slaves will render when 'Weeks Owned' is divisible by this number.");
+		}
+	}
+
+	options.addOption("Apply RA prompt changes for event images", "aiUseRAForEvents")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment("Apply image generation prompt changes from Rules Assistant for event images, including slave marketplace images. Useful for customizing prompts of non-owned slaves.");
+
+
+	options.addCustom("Advanced Config");
+	const samplerListSpan = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
+	App.Art.GenAI.sdClient.getSamplerList().then(list => {
+		if (list.length === 0) {
+			samplerListSpan.textContent = `Could not fetch valid samplers. Check your configuration.`;
+			samplerListSpan.classList.add('error');
+		} else {
+			samplerListSpan.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
+			if (!list.includes(V.aiSamplingMethod)) {
+				samplerListSpan.classList.add('error');
+				samplerListSpan.textContent = "ERROR: " + samplerListSpan.textContent;
+			}
+		}
+	});
+	options.addOption("Sampling Method", "aiSamplingMethod").showTextBox()
+		.addComment(App.UI.DOM.combineNodes(`The sampling method used by AI. `, samplerListSpan));
+
+
+	const schedulerListSpan = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
+	App.Art.GenAI.sdClient.getSchedulerList().then(list => {
+		if (list.length === 0) {
+			schedulerListSpan.textContent = `Could not fetch valid schedulers. Check your configuration.`;
+			schedulerListSpan.classList.add('error');
+		} else {
+			schedulerListSpan.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
+			if (!list.includes(V.aiSchedulingMethod)) {
+				schedulerListSpan.classList.add('error');
+				schedulerListSpan.textContent = "ERROR: " + schedulerListSpan.textContent;
+			}
+		}
+	});
+	options.addOption("Scheduling Method", "aiSchedulingMethod").showTextBox()
+		.addComment(App.UI.DOM.combineNodes(`The scheduling method used by AI. `, schedulerListSpan));
+
+	if (V.aiCfgScale < 1) {
+		V.aiCfgScale = 1;
+	}
+	options.addOption("CFG Scale", "aiCfgScale").showTextBox()
+		.addComment("The higher this number, the more the prompt influences the image. Generally between 5 to 12.");
+	if (V.aiTimeoutPerStep < 0.01) {
+		V.aiTimeoutPerStep = 0.01;
+	}
+
+	options.addOption("Seconds per Step", "aiTimeoutPerStep").showTextBox()
+		.addComment("The maximum number of Seconds (per Step) your system takes to render an image, measured from when the request is sent until the image is saved, divided by the number of Sampling Steps. Set this to as small a value as is reasonable so the game does not wait for images longer than you would.");
+	if (V.aiSamplingSteps < 2) {
+		V.aiSamplingSteps = 2;
+	}
+	options.addOption("Sampling Steps", "aiSamplingSteps").showTextBox()
+		.addComment("The number of steps used when generating the image. More steps might reduce artifacts but increases generation time. Generally between 20 to 50, but may be as high as 500 if you don't mind long queues in the background.");
+	if (V.aiSamplingStepsEvent < 2) {
+		V.aiSamplingStepsEvent = 2;
+	}
+	options.addOption("Event Sampling Steps", "aiSamplingStepsEvent").showTextBox()
+		.addComment("The number of steps used when generating an image during events. Generally between 20 to 50 to maintain a reasonable speed.");
+	if (V.aiHeight < 10) {
+		V.aiHeight = 10;
+	}
+	options.addOption("Height", "aiHeight").showTextBox()
+		.addComment("The height of the image.");
+	if (V.aiWidth < 10) {
+		V.aiWidth = 10;
+	}
+	options.addOption("Width", "aiWidth").showTextBox()
+		.addComment("The width of the image.");
+
+	const rfCheckSpan = App.UI.DOM.makeElement('span', `Validating Restore Faces...`);
+	App.Art.GenAI.sdClient.canRestoreFaces().then(result => {
+		if (result) {
+			if (V.aiAdetailerFace && V.aiRestoreFaces) {
+				rfCheckSpan.textContent = `Do not use Restore Faces and ADetailer Restore Face at the same time. Pick one.`;
+				rfCheckSpan.classList.add("error");
+			} else {
+				rfCheckSpan.textContent = "";
+			}
+		} else {
+			rfCheckSpan.textContent = `Restore Faces is unavailable on your Stable Diffusion installation.`;
+			rfCheckSpan.classList.add("error");
+		}
+	});
+	options.addOption("Restore Faces", "aiRestoreFaces")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment(App.UI.DOM.combineNodes("Use a model to restore faces after the image has been generated. May result in 'samey' faces. ", rfCheckSpan));
+
+	const adCheckSpan = App.UI.DOM.makeElement('span', `Validating ADetailer setup...`);
+	App.Art.GenAI.sdClient.hasAdetailer().then(result => {
+		if (result) {
+			adCheckSpan.textContent = "";
+		} else {
+			adCheckSpan.textContent = `ADetailer is unavailable on your Stable Diffusion installation.`;
+			adCheckSpan.classList.add("error");
+		}
+	});
+	options.addOption("ADetailer restore face", "aiAdetailerFace")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment(App.UI.DOM.combineNodes("Use AI to recognize and re-render faces with better detail. Much better than Restore Faces, but requires more technical setup. ", adCheckSpan));
+
+	options.addOption("Upscaling/highres fix", "aiUpscale")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment("Use AI upscaling to produce higher-resolution images. Significantly increases both time to generate and image quality.");
+	if (V.aiUpscale) {
+		options.addOption("Upscaling size", "aiUpscaleScale").showTextBox()
+			.addComment("Scales the dimensions of the image by this factor. Defaults to 1.75.");
+
+		const upscalerListSpan = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
+		App.Art.GenAI.sdClient.getUpscalerList().then(list => {
+			if (list.length === 0) {
+				upscalerListSpan.textContent = `Could not fetch valid upscalers. Check your configuration.`;
+				upscalerListSpan.classList.add('error');
+			} else {
+				upscalerListSpan.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
+				if (!list.includes(V.aiUpscaler)) {
+					upscalerListSpan.classList.add('error');
+					upscalerListSpan.textContent = "ERROR: " + upscalerListSpan.textContent;
+				}
+			}
+		});
+		options.addOption("Upscaling method", "aiUpscaler").showTextBox()
+			.addComment(App.UI.DOM.combineNodes(`The method used for upscaling the image. `, upscalerListSpan));
+	}
+
+	const opCheckSpan = App.UI.DOM.makeElement('span', `Validating ControlNet and OpenPose setup...`);
+	App.Art.GenAI.sdClient.hasOpenPose().then(result => {
+		if (result) {
+			opCheckSpan.textContent = "";
+		} else {
+			opCheckSpan.textContent = `OpenPose is unavailable on your Stable Diffusion installation. Check your ControlNet configuration.`;
+			opCheckSpan.classList.add("error");
+		}
+	});
+	options.addOption("Strictly control posing", "aiOpenPose")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment(App.UI.DOM.combineNodes(`Use the ControlNet extension's OpenPose module to strictly control slave poses. `, opCheckSpan));
+	if (V.aiOpenPose) {
+		const opModelList = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
+		App.Art.GenAI.sdClient.getOpenPoseModelList().then(list => {
+			if (list.length === 0) {
+				opModelList.textContent = `Could not fetch valid OpenPose models. Check your configuration.`;
+				opModelList.classList.add('error');
+			} else {
+				opModelList.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
+				if (!list.includes(V.aiOpenPoseModel)) {
+					opModelList.classList.add('error');
+					opModelList.textContent = "ERROR: " + opModelList.textContent;
+				}
+			}
+		});
+		options.addOption("OpenPose Model", "aiOpenPoseModel").showTextBox()
+			.addComment(App.UI.DOM.combineNodes(`The model used for applying the pose to the image. Enter the entire model name, including the checksum (i.e. "control_v11p_sd15_openpose [cab727d4]").`, opModelList));
+	}
+
+
+	options.addOption("CFG Scale Fix", "aiDynamicCfgEnabled")
+		.addValue("Enabled", true).on().addValue("Disabled", false).off()
+		.addComment('Use the "Stable Diffusion Dynamic Thresholding" extension.');
+
+	if (V.aiDynamicCfgEnabled) {
+		options.addOption("CFG Scale Fix: Mimicked Number", "aiDynamicCfgMimic").showTextBox()
+			.addComment("If CFG Scale Fix is on, then set this number to a CFG scale to mimic a normal CFG (5 to 12), and then set your actual CFG to something high (20, 30, etc.)");
+		if (V.aiDynamicCfgMimic < 0) {
+			V.aiDynamicCfgMimic = 0;
+		}
+		options.addOption("CFG Scale Fix: Minimum Scale", "aiDynamicCfgMinimum").showTextBox()
+			.addComment("CFG Scheduler minimums. Set to around 3 or 4 for best results.");
+		if (V.aiDynamicCfgMinimum < 0) {
+			V.aiDynamicCfgMinimum = 0;
+		}
+	}
+
+	const renderQueueOption = async (clicked = false) => {
+		const sleep = (ms) => new Promise(r => setTimeout(r, ms));
+		// wait for the button to render
+		while (!$("button:contains('Interrupt rendering')").length) {
+			await sleep(10);
+		}
+		if (clicked) {
+			// send interrupt when clicked
+			App.Art.GenAI.sdQueue.interrupt();
+		}
+		if (App.Art.GenAI.sdQueue.interrupted) {
+			$("button:contains('Interrupt rendering')").removeClass("off").addClass("on selected disabled");
+			await App.Art.GenAI.sdQueue.resumeAfterInterrupt();
+		}
+		$("button:contains('Interrupt rendering')").removeClass("on selected disabled").addClass("off");
+		App.Art.GenAI.sdQueue.updateQueueCounts();
+	};
+	options.addCustomOption("Rendering Queue management")
+		.addButton("Interrupt rendering and clear the rendering queues", () => renderQueueOption(true))
+		.addComment(`<span id="mainQueueCount">N/A</span> main images and <span id="backlogQueueCount">N/A</span> backlog images queued for generation.`);
+	// adjust the state of the button when it is rendered
+	renderQueueOption();
+	options.addCustomOption("Cache database management")
+		.addButton("Purge all images", async () => {
+			await App.Art.GenAI.staticImageDB.clear();
+			await App.Art.GenAI.reactiveImageDB.clear();
+		})
+		.addButton("Regenerate images for all slaves", () => {
+			// queue all slaves for regeneration in the background
+			if (V.aiCachingStrategy === 'static') {
+				V.slaves.forEach(s => App.Art.GenAI.staticCache.updateSlave(s)
+					.catch(error => {
+						console.log(error.message || error);
+					}));
+			} else {
+				// reactive
+				V.slaves.forEach(s => App.Art.GenAI.reactiveCache.updateSlave(s)
+					.catch(error => {
+						console.log(error.message || error);
+					}));
+			}
+			console.log(`${App.Art.GenAI.sdQueue.queue.length} requests queued for rendering.`);
+		})
+		.addComment(`The cache database is shared between games. Current cache size: <span id="cacheCount">Please wait...</span>`);
+	if (V.aiCachingStrategy === 'static') {
+		App.Art.GenAI.staticImageDB.sizeInfo().then((result) => {
+			$("#cacheCount").empty().append(result);
+		});
+	} else {
+		App.Art.GenAI.reactiveImageDB.sizeInfo().then((result) => {
+			$("#cacheCount").empty().append(result);
+		});
+	}
 };
 
 App.UI.artOptions = function() {
@@ -1613,242 +1884,7 @@ App.UI.artOptions = function() {
 			} else if (V.imageChoice === 2) {
 				option.addComment("This art development is dead since vanilla. Since it is not embedded, requires a separate art pack to be downloaded.");
 			} else if (V.imageChoice === 6) {
-				options.addComment("This is experimental. Please follow the setup instructions below.");
-				options.addCustom(App.UI.stableDiffusionInstallationGuide("Stable Diffusion Installation Guide"));
-				if (V.aiApiUrl.endsWith('/')) { // common error is including a trailing slash, which will fuck us up, so strip it automatically
-					V.aiApiUrl = V.aiApiUrl.slice(0, -1);
-				}
-				options.addOption("API URL", "aiApiUrl").showTextBox().addComment("The URL of the Automatic 1111 Stable Diffusion API.");
-				App.UI.aiPromptingOptions(options);
-
-				options.addOption("Visual age filter", 'aiAgeFilter')
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment(`Creating images of characters that <U>appear to be</U> minors may be questionable in some countries, especially if they are generated by AI. Realistic images are even riskier due to their easy confusion with real ones. This option attempts to generate SFW images for them. <span class="warning">You may want to check you local laws before disabling this option.</span>`);
-
-				options.addOption("Caching Strategy", 'aiCachingStrategy')
-					.addValue("Reactive", 'reactive').addValue("Static", 'static')
-					.addComment("Caching behavior for AI images. Reactive pictures always reflect the state of the slave at the current time. Static refreshes every set amount of weeks, or manually. Images will not be brought across different strategies, but if the model is the same the generated images will be the same as well.");
-
-				if (V.aiCachingStrategy === 'static') {
-					options.addOption("Automatic generation", "aiAutoGen")
-						.addValue("Enabled", true).on().addValue("Disabled", false).off()
-						.addComment("Generate images for new slaves on the fly. If disabled, you will need to manually click to generate each slave's image.");
-					if (V.aiAutoGen) {
-						if (V.aiAutoGenFrequency < 1) {
-							V.aiAutoGenFrequency = 1;
-						}
-						V.aiAutoGenFrequency = Math.round(V.aiAutoGenFrequency);
-						options.addOption("Regeneration Frequency", "aiAutoGenFrequency").showTextBox()
-							.addComment("How often (in weeks) regenerate slave images. Slaves will render when 'Weeks Owned' is divisible by this number.");
-					}
-				}
-
-				options.addOption("Apply RA prompt changes for event images", "aiUseRAForEvents")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment("Apply image generation prompt changes from Rules Assistant for event images, including slave marketplace images. Useful for customizing prompts of non-owned slaves.");
-
-				const samplerListSpan = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
-				App.Art.GenAI.sdClient.getSamplerList().then(list => {
-					if (list.length === 0) {
-						samplerListSpan.textContent = `Could not fetch valid samplers. Check your configuration.`;
-						samplerListSpan.classList.add('error');
-					} else {
-						samplerListSpan.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
-						if (!list.includes(V.aiSamplingMethod)) {
-							samplerListSpan.classList.add('error');
-							samplerListSpan.textContent = "ERROR: " + samplerListSpan.textContent;
-						}
-					}
-				});
-				options.addOption("Sampling Method", "aiSamplingMethod").showTextBox()
-					.addComment(App.UI.DOM.combineNodes(`The sampling method used by AI. `, samplerListSpan));
-
-				if (V.aiCfgScale < 1) {
-					V.aiCfgScale = 1;
-				}
-				options.addOption("CFG Scale", "aiCfgScale").showTextBox()
-					.addComment("The higher this number, the more the prompt influences the image. Generally between 5 to 12.");
-				if (V.aiTimeoutPerStep < 0.01) {
-					V.aiTimeoutPerStep = 0.01;
-				}
-				options.addOption("CFG Scale Fix", "aiDynamicCfgEnabled")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment('Use the "Stable Diffusion Dynamic Thresholding" extension.');
-
-				if (V.aiDynamicCfgEnabled) {
-					options.addOption("CFG Scale Fix: Mimicked Number", "aiDynamicCfgMimic").showTextBox()
-						.addComment("If CFG Scale Fix is on, then set this number to a CFG scale to mimic a normal CFG (5 to 12), and then set your actual CFG to something high (20, 30, etc.)");
-					if (V.aiDynamicCfgMimic < 0) {
-						V.aiDynamicCfgMimic = 0;
-					}
-					options.addOption("CFG Scale Fix: Minimum Scale", "aiDynamicCfgMinimum").showTextBox()
-						.addComment("CFG Scheduler minimums. Set to around 3 or 4 for best results.");
-					if (V.aiDynamicCfgMinimum < 0) {
-						V.aiDynamicCfgMinimum = 0;
-					}
-				}
-
-				options.addOption("Seconds per Step", "aiTimeoutPerStep").showTextBox()
-					.addComment("The maximum number of Seconds (per Step) your system takes to render an image.  This time is from the time the request is sent to the time it is saved divided by the number of Sampling Steps. Please set this at as small a value as reasonable to avoid the game from waiting longer than you are for images to generate.");
-				if (V.aiSamplingSteps < 2) {
-					V.aiSamplingSteps = 2;
-				}
-				options.addOption("Sampling Steps", "aiSamplingSteps").showTextBox()
-					.addComment("The number of steps used when generating the image. More steps might reduce artifacts but increases generation time. Generally between 20 to 50, but may be as high as 500 if you don't mind long queues in the background.");
-				if (V.aiSamplingStepsEvent < 2) {
-					V.aiSamplingStepsEvent = 2;
-				}
-				options.addOption("Event Sampling Steps", "aiSamplingStepsEvent").showTextBox()
-					.addComment("The number of steps used when generating an image during events. Generally between 20 to 50 to maintain a reasonable speed.");
-				if (V.aiHeight < 10) {
-					V.aiHeight = 10;
-				}
-				options.addOption("Height", "aiHeight").showTextBox()
-					.addComment("The height of the image.");
-				if (V.aiWidth < 10) {
-					V.aiWidth = 10;
-				}
-				options.addOption("Width", "aiWidth").showTextBox()
-					.addComment("The width of the image.");
-
-				const rfCheckSpan = App.UI.DOM.makeElement('span', `Validating Restore Faces...`);
-				App.Art.GenAI.sdClient.canRestoreFaces().then(result => {
-					if (result) {
-						if (V.aiAdetailerFace && V.aiRestoreFaces) {
-							rfCheckSpan.textContent = `Do not use Restore Faces and ADetailer Restore Face at the same time. Pick one.`;
-							rfCheckSpan.classList.add("error");
-						} else {
-							rfCheckSpan.textContent = "";
-						}
-					} else {
-						rfCheckSpan.textContent = `Restore Faces is unavailable on your Stable Diffusion installation.`;
-						rfCheckSpan.classList.add("error");
-					}
-				});
-				options.addOption("Restore Faces", "aiRestoreFaces")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment(App.UI.DOM.combineNodes("Use a model to restore faces after the image has been generated. May result in 'samey' faces. ", rfCheckSpan));
-
-				const adCheckSpan = App.UI.DOM.makeElement('span', `Validating ADetailer setup...`);
-				App.Art.GenAI.sdClient.hasAdetailer().then(result => {
-					if (result) {
-						adCheckSpan.textContent = "";
-					} else {
-						adCheckSpan.textContent = `ADetailer is unavailable on your Stable Diffusion installation.`;
-						adCheckSpan.classList.add("error");
-					}
-				});
-				options.addOption("ADetailer restore face", "aiAdetailerFace")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment(App.UI.DOM.combineNodes("Use AI to recognize and re-render faces with better detail. Much better than Restore Faces, but requires more technical setup. ", adCheckSpan));
-
-				options.addOption("Upscaling/highres fix", "aiUpscale")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment("Use AI upscaling to produce higher-resolution images. Significantly increases both time to generate and image quality.");
-				if (V.aiUpscale) {
-					options.addOption("Upscaling size", "aiUpscaleScale").showTextBox()
-						.addComment("Scales the dimensions of the image by this factor. Defaults to 1.75.");
-
-					const upscalerListSpan = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
-					App.Art.GenAI.sdClient.getUpscalerList().then(list => {
-						if (list.length === 0) {
-							upscalerListSpan.textContent = `Could not fetch valid upscalers. Check your configuration.`;
-							upscalerListSpan.classList.add('error');
-						} else {
-							upscalerListSpan.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
-							if (!list.includes(V.aiUpscaler)) {
-								upscalerListSpan.classList.add('error');
-								upscalerListSpan.textContent = "ERROR: " + upscalerListSpan.textContent;
-							}
-						}
-					});
-					options.addOption("Upscaling method", "aiUpscaler").showTextBox()
-						.addComment(App.UI.DOM.combineNodes(`The method used for upscaling the image. `, upscalerListSpan));
-				}
-
-				const opCheckSpan = App.UI.DOM.makeElement('span', `Validating ControlNet and OpenPose setup...`);
-				App.Art.GenAI.sdClient.hasOpenPose().then(result => {
-					if (result) {
-						opCheckSpan.textContent = "";
-					} else {
-						opCheckSpan.textContent = `OpenPose is unavailable on your Stable Diffusion installation. Check your ControlNet configuration.`;
-						opCheckSpan.classList.add("error");
-					}
-				});
-				options.addOption("Strictly control posing", "aiOpenPose")
-					.addValue("Enabled", true).on().addValue("Disabled", false).off()
-					.addComment(App.UI.DOM.combineNodes(`Use the ControlNet extension's OpenPose module to strictly control slave poses. `, opCheckSpan));
-				if (V.aiOpenPose) {
-					const opModelList = App.UI.DOM.makeElement('span', `Fetching options, please wait...`);
-					App.Art.GenAI.sdClient.getOpenPoseModelList().then(list => {
-						if (list.length === 0) {
-							opModelList.textContent = `Could not fetch valid OpenPose models. Check your configuration.`;
-							opModelList.classList.add('error');
-						} else {
-							opModelList.textContent = `Valid options on your Stable Diffusion installation: ${toSentence(list)}.`;
-							if (!list.includes(V.aiOpenPoseModel)) {
-								opModelList.classList.add('error');
-								opModelList.textContent = "ERROR: " + opModelList.textContent;
-							}
-						}
-					});
-					options.addOption("OpenPose Model", "aiOpenPoseModel").showTextBox()
-						.addComment(App.UI.DOM.combineNodes(`The model used for applying the pose to the image. Enter the entire model name, including the checksum (i.e. "control_v11p_sd15_openpose [cab727d4]").`, opModelList));
-				}
-
-				const renderQueueOption = async (clicked = false) => {
-					const sleep = (ms) => new Promise(r => setTimeout(r, ms));
-					// wait for the button to render
-					while (!$("button:contains('Interrupt rendering')").length) {
-						await sleep(10);
-					}
-					if (clicked) {
-						// send interrupt when clicked
-						App.Art.GenAI.sdQueue.interrupt();
-					}
-					if (App.Art.GenAI.sdQueue.interrupted) {
-						$("button:contains('Interrupt rendering')").removeClass("off").addClass("on selected disabled");
-						await App.Art.GenAI.sdQueue.resumeAfterInterrupt();
-					}
-					$("button:contains('Interrupt rendering')").removeClass("on selected disabled").addClass("off");
-					App.Art.GenAI.sdQueue.updateQueueCounts();
-				};
-				options.addCustomOption("Rendering Queue management")
-					.addButton("Interrupt rendering and clear the rendering queues", () => renderQueueOption(true))
-					.addComment(`<span id="mainQueueCount">N/A</span> main images and <span id="backlogQueueCount">N/A</span> backlog images queued for generation.`);
-				// adjust the state of the button when it is rendered
-				renderQueueOption();
-				options.addCustomOption("Cache database management")
-					.addButton("Purge all images", async () => {
-						await App.Art.GenAI.staticImageDB.clear();
-						await App.Art.GenAI.reactiveImageDB.clear();
-					})
-					.addButton("Regenerate images for all slaves", () => {
-						// queue all slaves for regeneration in the background
-						if (V.aiCachingStrategy === 'static') {
-							V.slaves.forEach(s => App.Art.GenAI.staticCache.updateSlave(s)
-								.catch(error => {
-									console.log(error.message || error);
-								}));
-						} else {
-							// reactive
-							V.slaves.forEach(s => App.Art.GenAI.reactiveCache.updateSlave(s)
-								.catch(error => {
-									console.log(error.message || error);
-								}));
-						}
-						console.log(`${App.Art.GenAI.sdQueue.queue.length} requests queued for rendering.`);
-					})
-					.addComment(`The cache database is shared between games. Current cache size: <span id="cacheCount">Please wait...</span>`);
-				if (V.aiCachingStrategy === 'static') {
-					App.Art.GenAI.staticImageDB.sizeInfo().then((result) => {
-						$("#cacheCount").empty().append(result);
-					});
-				} else {
-					App.Art.GenAI.reactiveImageDB.sizeInfo().then((result) => {
-						$("#cacheCount").empty().append(result);
-					});
-				}
+				App.UI.aiArtOptions(options);
 			}
 		} else { // custom images only
 			options.addOption("Show suggested AI prompts in Customize tab", "aiCustomImagePrompts")
-- 
GitLab