Mirror of https://github.com/DominikDoom/a1111-sd-webui-tagcomplete.git, synced 2026-01-27 03:29:55 +00:00
Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1c6bba2a3d | |
| | 9a47c2ec2c | |
| | fe32ad739d | |
| | ade67e30a6 | |
| | e9a21e7a55 | |
| | 3ef2a7d206 | |
| | 29b5bf0701 | |
| | 3eef536b64 | |
| | 0d24e697d2 | |
| | a27633da55 | |
| | 4cd6174a22 | |
| | 9155e4d42c | |
| | 700642a400 | |
| | 1b592dbf56 | |
| | d1eea880f3 | |
@@ -20,18 +20,15 @@ Booru style tag autocompletion for the AUTOMATIC1111 Stable Diffusion WebUI
 </div>

 <br/>

-#### ⚠️ Notice:
-I am currently looking for feedback on a new feature I'm working on and want to release soon.<br/>
-Please check [the announcement post](https://github.com/DominikDoom/a1111-sd-webui-tagcomplete/discussions/270) for more info if you are interested to help.
-
 # 📄 Description

 Tag Autocomplete is an extension for the popular [AUTOMATIC1111 web UI](https://github.com/AUTOMATIC1111/stable-diffusion-webui) for Stable Diffusion.
+You can install it using the inbuilt available extensions list, clone the files manually as described [below](#-installation), or use a pre-packaged version from [Releases](https://github.com/DominikDoom/a1111-sd-webui-tagcomplete/releases).

 It displays autocompletion hints for recognized tags from "image booru" boards such as Danbooru, which are primarily used for browsing Anime-style illustrations.
-Since some Stable Diffusion models were trained using this information, for example [Waifu Diffusion](https://github.com/harubaru/waifu-diffusion) and many of the NAI-descendant models or merges, using exact tags in prompts can often improve composition and consistency.
+Since most custom Stable Diffusion models were trained using this information or merged with ones that did, using exact tags in prompts can often improve composition and consistency, even if the model itself has a photorealistic style.

-You can install it using the inbuilt available extensions list, clone the files manually as described [below](#-installation), or use a pre-packaged version from [Releases](https://github.com/DominikDoom/a1111-sd-webui-tagcomplete/releases).
 Disclaimer: The default tag lists contain NSFW terms, please use them responsibly.

 <br/>
@@ -61,7 +61,7 @@ async function loadCSV(path) {
 }

 // Fetch API
-async function fetchAPI(url, json = true, cache = false) {
+async function fetchTacAPI(url, json = true, cache = false) {
     if (!cache) {
         const appendChar = url.includes("?") ? "&" : "?";
         url += `${appendChar}${new Date().getTime()}`
@@ -80,7 +80,7 @@ async function fetchAPI(url, json = true, cache = false) {
         return await response.text();
 }

-async function postAPI(url, body = null) {
+async function postTacAPI(url, body = null) {
     let response = await fetch(url, {
         method: "POST",
         headers: {'Content-Type': 'application/json'},
@@ -95,7 +95,7 @@ async function postAPI(url, body = null) {
     return await response.json();
 }

-async function putAPI(url, body = null) {
+async function putTacAPI(url, body = null) {
     let response = await fetch(url, { method: "PUT", body: body });

     if (response.status != 200) {
@@ -107,8 +107,8 @@ async function putAPI(url, body = null) {
 }

 // Extra network preview thumbnails
-async function getExtraNetworkPreviewURL(filename, type) {
-    const previewJSON = await fetchAPI(`tacapi/v1/thumb-preview/${filename}?type=${type}`, true, true);
+async function getTacExtraNetworkPreviewURL(filename, type) {
+    const previewJSON = await fetchTacAPI(`tacapi/v1/thumb-preview/${filename}?type=${type}`, true, true);
     if (previewJSON?.url) {
         const properURL = `sd_extra_networks/thumb?filename=${previewJSON.url}`;
         if ((await fetch(properURL)).status == 200) {
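These helpers keep their previous behavior; the Tac prefix presumably just avoids name collisions with other webui scripts that define similarly named globals. A rough usage sketch, run from an async context (the endpoint appears in later hunks, the Lora name and timestamp are made up):

```js
// Sketch only: fetch Lora metadata through the renamed helper.
// With cache = false, fetchTacAPI appends a timestamp query parameter,
// so the effective URL becomes e.g. "tacapi/v1/lora-info/someLora?1718000000000"
// and the browser cache is bypassed.
const info = await fetchTacAPI("tacapi/v1/lora-info/someLora", true, false);
if (info && info["preferred weight"]) {
    console.log("Preferred weight:", info["preferred weight"]);
}
```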
@@ -237,24 +237,34 @@ function mapUseCountArray(useCounts, posAndNeg = false) {
 }
 // Call API endpoint to increase bias of tag in the database
 function increaseUseCount(tagName, type, negative = false) {
-    postAPI(`tacapi/v1/increase-use-count?tagname=${tagName}&ttype=${type}&neg=${negative}`);
+    postTacAPI(`tacapi/v1/increase-use-count?tagname=${tagName}&ttype=${type}&neg=${negative}`);
 }
 // Get use count of tag from the database
 async function getUseCount(tagName, type, negative = false) {
-    return (await fetchAPI(`tacapi/v1/get-use-count?tagname=${tagName}&ttype=${type}&neg=${negative}`, true, false))["result"];
+    const response = await fetchTacAPI(`tacapi/v1/get-use-count?tagname=${tagName}&ttype=${type}&neg=${negative}`, true, false);
+    // Guard for no db
+    if (response == null) return null;
+    // Result
+    return response["result"];
 }
 async function getUseCounts(tagNames, types, negative = false) {
     // While semantically weird, we have to use POST here for the body, as urls are limited in length
     const body = JSON.stringify({"tagNames": tagNames, "tagTypes": types, "neg": negative});
-    const rawArray = (await postAPI(`tacapi/v1/get-use-count-list`, body))["result"]
-    return mapUseCountArray(rawArray);
+    const response = await postTacAPI(`tacapi/v1/get-use-count-list`, body)
+    // Guard for no db
+    if (response == null) return null;
+    // Results
+    return mapUseCountArray(response["result"]);
 }
 async function getAllUseCounts() {
-    const rawArray = (await fetchAPI(`tacapi/v1/get-all-use-counts`))["result"];
-    return mapUseCountArray(rawArray, true);
+    const response = await fetchTacAPI(`tacapi/v1/get-all-use-counts`);
+    // Guard for no db
+    if (response == null) return null;
+    // Results
+    return mapUseCountArray(response["result"], true);
 }
 async function resetUseCount(tagName, type, resetPosCount, resetNegCount) {
-    await putAPI(`tacapi/v1/reset-use-count?tagname=${tagName}&ttype=${type}&pos=${resetPosCount}&neg=${resetNegCount}`);
+    await putTacAPI(`tacapi/v1/reset-use-count?tagname=${tagName}&ttype=${type}&pos=${resetPosCount}&neg=${resetNegCount}`);
 }

 function createTagUsageTable(tagCounts) {
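With these guards, every use-count helper can return null when the frequency database is unavailable, so callers need a fallback; a later hunk in this set does exactly that with `|| []`. A minimal sketch of the calling pattern (tag names and type ids here are placeholders):

```js
// Sketch only: tolerate a missing frequency DB by falling back to an empty list.
const names = ["1girl", "solo"]; // placeholder tag names
const types = [1, 1];            // placeholder tag type ids
const counts = await getUseCounts(names, types, false) || [];
// counts is [] when the backend has no database, so downstream
// weighting code can iterate it without additional null checks.
```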
@@ -50,7 +50,7 @@ async function load() {
 async function sanitize(tagType, text) {
     if (tagType === ResultType.lora) {
         let multiplier = TAC_CFG.extraNetworksDefaultMultiplier;
-        let info = await fetchAPI(`tacapi/v1/lora-info/${text}`)
+        let info = await fetchTacAPI(`tacapi/v1/lora-info/${text}`)
         if (info && info["preferred weight"]) {
             multiplier = info["preferred weight"];
         }
@@ -50,7 +50,7 @@ async function load() {
 async function sanitize(tagType, text) {
     if (tagType === ResultType.lyco) {
         let multiplier = TAC_CFG.extraNetworksDefaultMultiplier;
-        let info = await fetchAPI(`tacapi/v1/lyco-info/${text}`)
+        let info = await fetchTacAPI(`tacapi/v1/lyco-info/${text}`)
         if (info && info["preferred weight"]) {
             multiplier = info["preferred weight"];
         }
@@ -1,14 +1,14 @@
 // Regex
-const WC_REGEX = /\b__([^,]+)__([^, ]*)\b/g;
+const WC_REGEX = new RegExp(/__([^,]+)__([^, ]*)/g);

 // Trigger conditions
-const WC_TRIGGER = () => TAC_CFG.useWildcards && [...tagword.matchAll(WC_REGEX)].length > 0;
-const WC_FILE_TRIGGER = () => TAC_CFG.useWildcards && (tagword.startsWith("__") && !tagword.endsWith("__") || tagword === "__");
+const WC_TRIGGER = () => TAC_CFG.useWildcards && [...tagword.matchAll(new RegExp(WC_REGEX.source.replaceAll("__", escapeRegExp(TAC_CFG.wcWrap)), "g"))].length > 0;
+const WC_FILE_TRIGGER = () => TAC_CFG.useWildcards && (tagword.startsWith(TAC_CFG.wcWrap) && !tagword.endsWith(TAC_CFG.wcWrap) || tagword === TAC_CFG.wcWrap);

 class WildcardParser extends BaseTagParser {
     async parse() {
         // Show wildcards from a file with that name
-        let wcMatch = [...tagword.matchAll(WC_REGEX)]
+        let wcMatch = [...tagword.matchAll(new RegExp(WC_REGEX.source.replaceAll("__", escapeRegExp(TAC_CFG.wcWrap)), "g"))];
         let wcFile = wcMatch[0][1];
         let wcWord = wcMatch[0][2];
@@ -38,7 +38,7 @@ class WildcardParser extends BaseTagParser {
             }
             wildcards = wildcards.concat(getDescendantProp(yamlWildcards[basePath], fileName));
         } else {
-            const fileContent = (await fetchAPI(`tacapi/v1/wildcard-contents?basepath=${basePath}&filename=${fileName}.txt`, false))
+            const fileContent = (await fetchTacAPI(`tacapi/v1/wildcard-contents?basepath=${basePath}&filename=${fileName}.txt`, false))
                 .split("\n")
                 .filter(x => x.trim().length > 0 && !x.startsWith('#')); // Remove empty lines and comments
             wildcards = wildcards.concat(fileContent);
@@ -64,8 +64,8 @@ class WildcardFileParser extends BaseTagParser {
     parse() {
         // Show available wildcard files
         let tempResults = [];
-        if (tagword !== "__") {
-            let lmb = (x) => x[1].toLowerCase().includes(tagword.replace("__", ""))
+        if (tagword !== TAC_CFG.wcWrap) {
+            let lmb = (x) => x[1].toLowerCase().includes(tagword.replace(TAC_CFG.wcWrap, ""))
             tempResults = wildcardFiles.filter(lmb).concat(wildcardExtFiles.filter(lmb)) // Filter by tagword
         } else {
             tempResults = wildcardFiles.concat(wildcardExtFiles);
@@ -151,7 +151,7 @@ async function load() {

 function sanitize(tagType, text) {
     if (tagType === ResultType.wildcardFile || tagType === ResultType.yamlWildcard) {
-        return `__${text}__`;
+        return `${TAC_CFG.wcWrap}${text}${TAC_CFG.wcWrap}`;
     } else if (tagType === ResultType.wildcardTag) {
         return text;
     }
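The wildcard hunks above swap the hard-coded `__` wrapper for the configurable `TAC_CFG.wcWrap`, rebuilding the pattern from `WC_REGEX.source` on each use. A self-contained sketch of that construction (the `escapeRegExp` body shown here is the common implementation and an assumption, as is the example wrap string):

```js
// Sketch only: build the wildcard regex for a custom wrap string.
function escapeRegExp(str) {
    // Escape characters that are special inside a regular expression
    return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

const WC_REGEX = new RegExp(/__([^,]+)__([^, ]*)/g);
const wcWrap = "**"; // e.g. a custom wrapper configured via dp_parser_wildcard_wrap

const wcRegex = new RegExp(WC_REGEX.source.replaceAll("__", escapeRegExp(wcWrap)), "g");

console.log([..."**colors**blu".matchAll(wcRegex)][0]);
// -> ["**colors**blu", "colors", "blu", ...] (full match plus file and word groups)
```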
@@ -241,6 +241,7 @@ async function syncOptions() {
         wildcardCompletionMode: opts["tac_wildcardCompletionMode"],
         modelKeywordCompletion: opts["tac_modelKeywordCompletion"],
         modelKeywordLocation: opts["tac_modelKeywordLocation"],
+        wcWrap: opts["dp_parser_wildcard_wrap"] || "__", // to support custom wrapper chars set by dp_parser
         // Alias settings
         alias: {
             searchByAlias: opts["tac_alias.searchByAlias"],
@@ -413,7 +414,7 @@ const COMPLETED_WILDCARD_REGEX = /__[^\s,_][^\t\n\r,_]*[^\s,_]__[^\s,_]*/g;
 const STYLE_VAR_REGEX = /\$\(?[^$|\[\],\s]*\)?/g;
 const NORMAL_TAG_REGEX = /[^\s,|<>\[\]:]+_\([^\s,|<>\[\]:]*\)?|[^\s,|<>():\[\]]+|</g;
 const RUBY_TAG_REGEX = /[\w\d<][\w\d' \-?!/$%]{2,}>?/g;
-const TAG_REGEX = new RegExp(`${POINTY_REGEX.source}|${COMPLETED_WILDCARD_REGEX.source}|${STYLE_VAR_REGEX.source}|${NORMAL_TAG_REGEX.source}`, "g");
+const TAG_REGEX = () => { return new RegExp(`${POINTY_REGEX.source}|${COMPLETED_WILDCARD_REGEX.source.replaceAll("__", escapeRegExp(TAC_CFG.wcWrap))}|${STYLE_VAR_REGEX.source}|${NORMAL_TAG_REGEX.source}`, "g"); }

 // On click, insert the tag into the prompt textbox with respect to the cursor position
 async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithoutChoice = false) {
@@ -469,7 +470,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
             // Don't cut off the __ at the end if it is already the full path
             if (firstDifference > 0 && firstDifference < longestResult) {
                 // +2 because the sanitized text already has the __ at the start but the matched text doesn't
-                sanitizedText = sanitizedText.substring(0, firstDifference + 2);
+                sanitizedText = sanitizedText.substring(0, firstDifference + TAC_CFG.wcWrap.length);
             } else if (firstDifference === 0) {
                 sanitizedText = tagword;
             }
@@ -484,7 +485,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
         case ResultType.wildcardFile:
         case ResultType.yamlWildcard:
             // We only want to update the frequency for a full wildcard, not partial paths
-            if (sanitizedText.endsWith("__"))
+            if (sanitizedText.endsWith(TAC_CFG.wcWrap))
                 name = text
             break;
         case ResultType.chant:
@@ -552,7 +553,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
     let keywords = null;
     // Check built-in activation words first
     if (tagType === ResultType.lora || tagType === ResultType.lyco) {
-        let info = await fetchAPI(`tacapi/v1/lora-info/${result.text}`)
+        let info = await fetchTacAPI(`tacapi/v1/lora-info/${result.text}`)
         if (info && info["activation text"]) {
             keywords = info["activation text"];
         }
@@ -564,7 +565,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout

         // No match, try to find a sha256 match from the cache file
         if (!nameDict) {
-            const sha256 = await fetchAPI(`/tacapi/v1/lora-cached-hash/${result.text}`)
+            const sha256 = await fetchTacAPI(`/tacapi/v1/lora-cached-hash/${result.text}`)
             if (sha256) {
                 nameDict = modelKeywordDict.get(sha256);
             }
@@ -622,7 +623,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
     // Update previous tags with the edited prompt to prevent re-searching the same term
     let weightedTags = [...newPrompt.matchAll(WEIGHT_REGEX)]
         .map(match => match[1]);
-    let tags = newPrompt.match(TAG_REGEX)
+    let tags = newPrompt.match(TAG_REGEX())
     if (weightedTags !== null) {
         tags = tags.filter(tag => !weightedTags.some(weighted => tag.includes(weighted)))
             .concat(weightedTags);
@@ -818,7 +819,7 @@ function addResultsToList(textArea, results, tagword, resetList) {
         // Check if it's a negative prompt
         let isNegative = textAreaId.includes("n");

-        // Add listener
+        // Add click listener
         li.addEventListener("click", (e) => {
             if (e.ctrlKey || e.metaKey) {
                 resetUseCount(result.text, result.type, !isNegative, isNegative);
@@ -827,6 +828,38 @@ function addResultsToList(textArea, results, tagword, resetList) {
                 insertTextAtCursor(textArea, result, tagword);
             }
         });
+        // Add delayed hover listener for extra network previews
+        if (
+            TAC_CFG.showExtraNetworkPreviews &&
+            [
+                ResultType.embedding,
+                ResultType.hypernetwork,
+                ResultType.lora,
+                ResultType.lyco,
+            ].includes(result.type)
+        ) {
+            li.addEventListener("mouseover", async () => {
+                const me = this;
+                let hoverTimeout;
+
+                hoverTimeout = setTimeout(async () => {
+                    // If the tag we hover over is already selected, do nothing
+                    if (selectedTag && selectedTag === i) return;
+
+                    oldSelectedTag = selectedTag;
+                    selectedTag = i;
+
+                    // Update selection without scrolling to the item (since we would
+                    // immediately trigger the next scroll as the items move under the cursor)
+                    updateSelectionStyle(textArea, selectedTag, oldSelectedTag, false);
+                }, 400);
+                // Reset delay timer if we leave the item
+                me.addEventListener("mouseout", () => {
+                    clearTimeout(hoverTimeout);
+                });
+            });
+        }
+
         // Add element to list
         resultsList.appendChild(li);
     }
@@ -839,7 +872,7 @@ function addResultsToList(textArea, results, tagword, resetList) {
     }
 }

-async function updateSelectionStyle(textArea, newIndex, oldIndex) {
+async function updateSelectionStyle(textArea, newIndex, oldIndex, scroll = true) {
     let textAreaId = getTextAreaIdentifier(textArea);
     let resultDiv = gradioApp().querySelector('.autocompleteResults' + textAreaId);
     let resultsList = resultDiv.querySelector('ul');
@@ -854,40 +887,25 @@ async function updateSelectionStyle(textArea, newIndex, oldIndex) {
         let selected = items[newIndex];
         selected.classList.add('selected');

-        // Set scrolltop to selected item
-        resultDiv.scrollTop = selected.offsetTop - resultDiv.offsetTop;
+        // Set scrolltop to selected item
+        if (scroll) resultDiv.scrollTop = selected.offsetTop - resultDiv.offsetTop;
     }

     // Show preview if enabled and the selected type supports it
     if (newIndex !== null) {
-        let selected = items[newIndex];
-        let previewTypes = ["v1 Embedding", "v2 Embedding", "Hypernetwork", "Lora", "Lyco"];
-        let selectedType = selected.querySelector(".acMetaText").innerText;
-        let selectedFilename = selected.querySelector(".acListItem").innerText;
+        let selectedResult = results[newIndex];
+        let selectedType = selectedResult.type;
+        // These types support previews (others could technically too, but are not native to the webui gallery)
+        let previewTypes = [ResultType.embedding, ResultType.hypernetwork, ResultType.lora, ResultType.lyco];

         let previewDiv = gradioApp().querySelector(`.autocompleteParent${textAreaId} .sideInfo`);

         if (TAC_CFG.showExtraNetworkPreviews && previewTypes.includes(selectedType)) {
-            let shorthandType = "";
-            switch (selectedType) {
-                case "v1 Embedding":
-                case "v2 Embedding":
-                    shorthandType = "embed";
-                    break;
-                case "Hypernetwork":
-                    shorthandType = "hyper";
-                    break;
-                case "Lora":
-                    shorthandType = "lora";
-                    break;
-                case "Lyco":
-                    shorthandType = "lyco";
-                    break;
-            }
-
             let img = previewDiv.querySelector("img");

-            let url = await getExtraNetworkPreviewURL(selectedFilename, shorthandType);
+            // String representation of our type enum
+            const typeString = Object.keys(ResultType)[selectedType - 1].toLowerCase();
+            // Get image from API
+            let url = await getTacExtraNetworkPreviewURL(selectedResult.text, typeString);
             if (url) {
                 img.src = url;
                 previewDiv.style.display = "block";
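The refactor above reads the result's type directly instead of parsing the rendered meta text, and derives the API type string from the enum value. A sketch of that index-to-name lookup (the ResultType members and their order are assumptions here; only the lookup expression itself is taken from the hunk):

```js
// Sketch only: a 1-based enum-like object, so (value - 1) indexes into its key list.
const ResultType = Object.freeze({
    tag: 1,
    extra: 2,
    embedding: 3,
    wildcardTag: 4,
    wildcardFile: 5,
    yamlWildcard: 6,
    hypernetwork: 7,
    lora: 8,
    lyco: 9,
    chant: 10,
});

const selectedType = ResultType.hypernetwork;
const typeString = Object.keys(ResultType)[selectedType - 1].toLowerCase(); // "hypernetwork"
// typeString is then passed as the ?type= parameter of tacapi/v1/thumb-preview
```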
@@ -1055,7 +1073,7 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
     // We also match for the weighting format (e.g. "tag:1.0") here, and combine the two to get the full tag word set
     let weightedTags = [...prompt.matchAll(WEIGHT_REGEX)]
         .map(match => match[1]);
-    let tags = prompt.match(TAG_REGEX)
+    let tags = prompt.match(TAG_REGEX())
     if (weightedTags !== null && tags !== null) {
         tags = tags.filter(tag => !weightedTags.some(weighted => tag.includes(weighted) && !tag.startsWith("<[") && !tag.startsWith("$(")))
             .concat(weightedTags);
@@ -1201,7 +1219,7 @@ async function autocomplete(textArea, prompt, fixedTag = null) {

     // Request use counts from the DB
     const names = TAC_CFG.frequencyIncludeAlias ? tagNames.concat(aliasNames) : tagNames;
-    const counts = await getUseCounts(names, types, isNegative);
+    const counts = await getUseCounts(names, types, isNegative) || [];

     // Pre-calculate weights to prevent duplicate work
     const resultBiasMap = new Map();
@@ -1361,7 +1379,7 @@ async function refreshTacTempFiles(api = false) {
     }

     if (api) {
-        await postAPI("tacapi/v1/refresh-temp-files");
+        await postTacAPI("tacapi/v1/refresh-temp-files");
         await reload();
     } else {
         setTimeout(async () => {
@@ -1371,7 +1389,7 @@ async function refreshTacTempFiles(api = false) {
 }

 async function refreshEmbeddings() {
-    await postAPI("tacapi/v1/refresh-embeddings", null);
+    await postTacAPI("tacapi/v1/refresh-embeddings", null);
     embeddings = [];
     await processQueue(QUEUE_FILE_LOAD, null);
     console.log("TAC: Refreshed embeddings");
@@ -1,6 +1,7 @@
 # This helper script scans folders for wildcards and embeddings and writes them
 # to a temporary file to expose it to the javascript side

+import sys
 import glob
 import importlib
 import json
@@ -21,7 +22,14 @@ from scripts.model_keyword_support import (get_lora_simple_hash,
 from scripts.shared_paths import *

 try:
-    import scripts.tag_frequency_db as tdb
+    try:
+        from scripts import tag_frequency_db as tdb
+    except ModuleNotFoundError:
+        from inspect import getframeinfo, currentframe
+        filename = getframeinfo(currentframe()).filename
+        parent = Path(filename).resolve().parent
+        sys.path.append(str(parent))
+        import tag_frequency_db as tdb

     # Ensure the db dependency is reloaded on script reload
     importlib.reload(tdb)
@@ -197,7 +205,7 @@ def get_embeddings(sd_model):
     skipped = skipped | skipped_sdnext

     # Add embeddings to the correct list
-    for key, emb in (loaded | skipped).items():
+    for key, emb in (skipped | loaded).items():
         if emb.filename is None:
             continue

@@ -719,9 +727,9 @@ def api_tac(_: gr.Blocks, app: FastAPI):
             return LORA_PATH
         elif type == "lyco":
             return LYCO_PATH
-        elif type == "hyper":
+        elif type == "hypernetwork":
             return HYP_PATH
-        elif type == "embed":
+        elif type == "embedding":
             return EMB_PATH
         else:
             return None
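Together with the JavaScript changes above, the preview request now carries the full type name rather than a shorthand, and this endpoint maps it to the matching model directory. A rough sketch of the round trip (the file name is a placeholder; the paths follow the constants in the hunk):

```js
// Sketch only: request a preview thumbnail for a hypernetwork by file name.
const url = await getTacExtraNetworkPreviewURL("myHypernet", "hypernetwork");
// -> GET tacapi/v1/thumb-preview/myHypernet?type=hypernetwork
//    resolved server-side via HYP_PATH ("embedding" maps to EMB_PATH, etc.)
if (url) {
    console.log("Preview image available at", url);
}
```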
@@ -802,7 +810,10 @@ def api_tac(_: gr.Blocks, app: FastAPI):
         date_limit = getattr(shared.opts, "tac_frequencyMaxAge", 30)
         date_limit = date_limit if date_limit > 0 else None

-        count_list = list(db.get_tag_counts(body.tagNames, body.tagTypes, body.neg, date_limit))
+        if db:
+            count_list = list(db.get_tag_counts(body.tagNames, body.tagTypes, body.neg, date_limit))
+        else:
+            count_list = None

         # If a limit is set, return at max the top n results by count
         if count_list and len(count_list):
@@ -78,6 +78,7 @@ class TagFrequencyDb:
         )

     def __get_version(self):
+        db_version = None
         with transaction() as cursor:
             cursor.execute(
                 """
@@ -28,5 +28,17 @@
         "terms": "Water, Magic, Fancy",
         "content": "(extremely detailed CG unity 8k wallpaper), (masterpiece), (best quality), (ultra-detailed), (best illustration),(best shadow), (an extremely delicate and beautiful), classic, dynamic angle, floating, fine detail, Depth of field, classic, (painting), (sketch), (bloom), (shine), glinting stars,\n\na girl, solo, bare shoulders, flat chest, diamond and glaring eyes, beautiful detailed cold face, very long blue and sliver hair, floating black feathers, wavy hair, extremely delicate and beautiful girls, beautiful detailed eyes, glowing eyes,\n\nriver, (forest),palace, (fairyland,feather,flowers, nature),(sunlight),Hazy fog, mist",
         "color": 5
     },
+    {
+        "name": "Pony-Positive",
+        "terms": "Pony,Score,Positive,Quality",
+        "content": "score_9, score_8_up, score_7_up, score_6_up, source_anime, source_furry, source_pony, source_cartoon",
+        "color": 1
+    },
+    {
+        "name": "Pony-Negative",
+        "terms": "Pony,Score,Negative,Quality",
+        "content": "score_1, score_2, score_3, score_4, score_5, source_anime, source_furry, source_pony, source_cartoon",
+        "color": 3
+    }
 ]
tags/derpibooru.csv (141146): file diff suppressed because it is too large.