mirror of https://github.com/DominikDoom/a1111-sd-webui-tagcomplete.git
synced 2026-01-27 03:29:55 +00:00

Compare commits

40 Commits
| SHA1 |
|---|
| 5db035cc3a |
| 90cf3147fd |
| 4d4f23e551 |
| 80b47c61bb |
| 57821aae6a |
| e23bb6d4ea |
| d4cca00575 |
| 86ea94a565 |
| 53f46c91a2 |
| e5f93188c3 |
| 3e57842ac6 |
| 32c4589df3 |
| 5bbd97588c |
| b2a663f7a7 |
| 6f93d19a2b |
| 79bab04fd2 |
| 5b69d1e622 |
| 651cf5fb46 |
| 5deb72cddf |
| 97ebe78205 |
| b937e853c9 |
| f63bbf947f |
| 16bc6d8868 |
| ebe276ee44 |
| 995a5ecdba |
| 90d144a5f4 |
| 14a4440c33 |
| cdf092f3ac |
| e1598378dc |
| 599ad7f95f |
| 0b2bb138ee |
| 4a415f1a04 |
| 21de5fe003 |
| a020df91b2 |
| 0260765b27 |
| 638c073f37 |
| d11b53083b |
| 571072eea4 |
| acfdbf1ed4 |
| 2e271aea5c |
README.md
@@ -74,6 +74,10 @@ Wildcard script support:

https://user-images.githubusercontent.com/34448969/200128031-22dd7c33-71d1-464f-ae36-5f6c8fd49df0.mp4

Extra Network preview support:

https://github.com/DominikDoom/a1111-sd-webui-tagcomplete/assets/34448969/3c0cad84-fb5f-436d-b05a-28db35860d13

Dark and Light mode supported, including tag colors:


@@ -123,24 +127,43 @@ Completion for these types is triggered by typing `<`. By default it will show t
- Or `<lora:` and `<lyco:` respectively for the long form
- `<h:` or `<hypernet:` will only show Hypernetworks
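As a quick illustration of the trigger syntax described in the hunk above (editor's sketch, not part of the diff; the Lora name and the inserted weight are hypothetical and depend on the configured default multiplier):

```text
<            → mixed results for all extra network types
<h:          → Hypernetworks only (same as <hypernet:)
<lora:detail → Loras whose filename contains "detail"
```

Choosing a result then inserts the full tag, e.g. `<lora:add_detail:1>`.
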
### Live previews
Tag Autocomplete will now also show the preview images used for the cards in the Extra Networks menu in a small window next to the regular popup.
This enables quick comparisons and additional info for unclear filenames without having to stop typing to look it up in the webui menu.
It works for all supported extra network types that use preview images (Loras/Lycos, Embeddings & Hypernetworks). The preview window will stay hidden for normal tags or if no preview was found.



### Lora / Lyco trigger word completion
This is an advanced feature that will try to add known trigger words on autocompleting a Lora/Lyco.
This feature will try to add known trigger words on autocompleting a Lora/Lyco.

It uses the list provided by the [model-keyword](https://github.com/mix1009/model-keyword/) extension, which thus needs to be installed to use this feature. The list is also regularly updated through it.
It primarily uses the list provided by the [model-keyword](https://github.com/mix1009/model-keyword/) extension, which thus needs to be installed to use this feature. The list is also regularly updated through it.
However, once installed, you can deactivate it if you want, since tag autocomplete only needs the local keyword lists it ships with, not the extension itself.

The used files are `lora-keywords.txt` and `lora-keywords-user.txt` in the model-keyword installation folder.
The used files are `lora-keyword.txt` and `lora-keyword-user.txt` in the model-keyword installation folder.
If the main file isn't found, the feature will simply deactivate itself, everything else should work normally.

To add custom mappings for unknown Loras, you can use the UI provided by model-keyword, it will automatically write it to the `lora-keywords-user.txt` for you (and create it if it doesn't exist).
The only issue is that it has no official support for the Lycoris extension and doesn't scan its folder for files, so to add them through the UI you will have to temporarily move them into the Lora model folder to be able to select them in model-keyword's dropdown.
Some are already included in the default list though, so trying it out first is advisable.
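To make the behaviour concrete, here is an illustrative before/after of such a completion (editor's sketch; the Lora and trigger word names are hypothetical, and the example assumes the "Start of prompt" keyword location and a default multiplier of 1):

```text
Before: masterpiece, best quality, <lora:exampleChar
After:  example_trigger, masterpiece, best quality, <lora:exampleCharacter:1>
```
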
<details>
<summary>Walkthrough to add custom keywords</summary>
#### Note:
As of [v1.5.0](https://github.com/AUTOMATIC1111/stable-diffusion-webui/commit/a3ddf464a2ed24c999f67ddfef7969f8291567be), the webui provides a native method to add activation keywords for Lora through the Extra networks config UI.
These trigger words will always be preferred over the model-keyword ones and can be used without needing to install the model-keyword extension. This will however, obviously, be limited to those manually added keywords. For automatic discovery of keywords, you will still need the big list provided by model-keyword.


</details>
After having added your custom keywords, you will need to either restart the UI or use the "Refresh TAC temp files" setting button.
Custom trigger words can be added through two methods:
1. Using the extra networks UI (recommended):
- Only works with webui version v1.5.0 upwards, but much easier to use and works without the model-keyword extension
- This method requires no manual refresh
- <details>
<summary>Image example</summary>



</details>
2. Through the model-keyword UI:
- One issue with this method is that it has no official support for the Lycoris extension and doesn't scan its folder for files, so to add them through the UI you will have to temporarily move them into the Lora model folder to be able to select them in model-keyword's dropdown. Some are already included in the default list though, so trying it out first is advisable.
- After having added your custom keywords, you will need to either restart the UI or use the "Refresh TAC temp files" setting button.
- <details>
<summary>Image example</summary>


</details>

Sometimes the inserted keywords can be wrong due to a hash collision, however model-keyword and tag autocomplete take the name of the file into account too if the collision is known.

@@ -295,6 +318,14 @@ If this option is turned on, it will show a `?` link next to the tag. Clicking t


</details>
<!-- Wiki links -->
<details>
<summary>Extra network live previews</summary>

This option enables a small preview window alongside the normal completion popup that will show the card preview also used in the extra networks tab for that file.


</details>
<!-- Insertion -->
<details>
<summary>Completion settings</summary>
@@ -443,7 +474,9 @@ You can also add this to your quicksettings bar to have the refresh button avail

# Translations
An additional file can be added in the translation section, which will be used to translate both tags and aliases and also enables searching by translation.
This file needs to be a CSV in the format `<English tag/alias>,<Translation>`, but for backwards compatibility with older files that used a three column format, you can turn on `Translation file uses old 3-column translation format instead of the new 2-column one` to support them. In that case, the second column will be unused and skipped during parsing.
This file needs to be a CSV in the format `<English tag/alias>,<Translation>`. Some older files use a three column format, which requires a compatibility setting to be activated.
You can find it under `Settings > Tag autocomplete > Translation filename > Translation file uses old 3-column translation format instead of the new 2-column one`.
With it on, the second column will be unused and skipped during parsing.

Example with Chinese translation:
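(The README shows a screenshot at this point; as a stand-in, a minimal 2-column translation file might look like the following — the tag/translation pairs are only illustrative.)

```csv
1girl,1女孩
long hair,长发
blue sky,蓝天
```
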

@@ -11,6 +11,7 @@ var extras = [];
var wildcardFiles = [];
var wildcardExtFiles = [];
var yamlWildcards = [];
var umiWildcards = [];
var embeddings = [];
var hypernetworks = [];
var loras = [];
@@ -35,6 +36,7 @@ let hideBlocked = false;
// Tag selection for keyboard navigation
var selectedTag = null;
var oldSelectedTag = null;
var resultCountBeforeNormalTags = 0;

// Lora keyword undo/redo history
var textBeforeKeywordInsertion = "";
@@ -8,10 +8,11 @@ const ResultType = Object.freeze({
"wildcardTag": 4,
"wildcardFile": 5,
"yamlWildcard": 6,
"hypernetwork": 7,
"lora": 8,
"lyco": 9,
"chant": 10
"umiWildcard": 7,
"hypernetwork": 8,
"lora": 9,
"lyco": 10,
"chant": 11
});

// Class to hold result data and annotations to make it clearer to use

@@ -9,7 +9,11 @@ const core = [
"#img2img_prompt > label > textarea",
"#txt2img_neg_prompt > label > textarea",
"#img2img_neg_prompt > label > textarea",
".prompt > label > textarea"
".prompt > label > textarea",
"#txt2img_edit_style_prompt > label > textarea",
"#txt2img_edit_style_neg_prompt > label > textarea",
"#img2img_edit_style_prompt > label > textarea",
"#img2img_edit_style_neg_prompt > label > textarea"
];

// Third party text area selectors
@@ -57,6 +61,24 @@ const thirdParty = {
"[id^=MD-i2i][id$=prompt] textarea",
"[id^=MD-i2i][id$=prompt] input[type='text']"
]
},
"adetailer-t2i": {
"base": "#txt2img_script_container",
"hasIds": true,
"onDemand": true,
"selectors": [
"[id^=script_txt2img_adetailer_ad_prompt] textarea",
"[id^=script_txt2img_adetailer_ad_negative_prompt] textarea"
]
},
"adetailer-i2i": {
"base": "#img2img_script_container",
"hasIds": true,
"onDemand": true,
"selectors": [
"[id^=script_img2img_adetailer_ad_prompt] textarea",
"[id^=script_img2img_adetailer_ad_negative_prompt] textarea"
]
}
}

@@ -90,7 +112,7 @@ function addOnDemandObservers(setupFunction) {

let base = gradioApp().querySelector(entry.base);
if (!base) continue;

let accordions = [...base?.querySelectorAll(".gradio-accordion")];
if (!accordions) continue;

@@ -115,12 +137,12 @@ function addOnDemandObservers(setupFunction) {
[...gradioApp().querySelectorAll(entry.selectors.join(", "))].forEach(x => setupFunction(x));
} else { // Otherwise, we have to find the text areas by their adjacent labels
let base = gradioApp().querySelector(entry.base);

// Safety check
if (!base) continue;

let allTextAreas = [...base.querySelectorAll("textarea, input[type='text']")];

// Filter the text areas where the adjacent label matches one of the selectors
let matchingTextAreas = allTextAreas.filter(ta => [...ta.parentElement.childNodes].some(x => entry.selectors.includes(x.innerText)));
matchingTextAreas.forEach(x => setupFunction(x));
@@ -41,7 +41,7 @@ function parseCSV(str) {
async function readFile(filePath, json = false, cache = false) {
if (!cache)
filePath += `?${new Date().getTime()}`;

let response = await fetch(`file=${filePath}`);

if (response.status != 200) {
@@ -61,6 +61,43 @@ async function loadCSV(path) {
return parseCSV(text);
}

// Fetch API
async function fetchAPI(url, json = true, cache = false) {
if (!cache) {
const appendChar = url.includes("?") ? "&" : "?";
url += `${appendChar}${new Date().getTime()}`
}

let response = await fetch(url);

if (response.status != 200) {
console.error(`Error fetching API endpoint "${url}": ` + response.status, response.statusText);
return null;
}

if (json)
return await response.json();
else
return await response.text();
}

// Extra network preview thumbnails
async function getExtraNetworkPreviewURL(filename, type) {
const previewJSON = await fetchAPI(`tacapi/v1/thumb-preview/${filename}?type=${type}`, true, true);
if (previewJSON?.url) {
const properURL = `sd_extra_networks/thumb?filename=${previewJSON.url}`;
if ((await fetch(properURL)).status == 200) {
return properURL;
} else {
// create blob url
const blob = await (await fetch(`tacapi/v1/thumb-preview-blob/${filename}?type=${type}`)).blob();
return URL.createObjectURL(blob);
}
} else {
return null;
}
}

// Debounce function to prevent spamming the autocomplete function
var dbTimeOut;
const debounce = (func, wait = 300) => {
@@ -89,6 +126,28 @@ function difference(a, b) {
)].reduce((acc, [v, count]) => acc.concat(Array(Math.abs(count)).fill(v)), []);
}

// Object flatten function adapted from https://stackoverflow.com/a/61602592
// $roots keeps previous parent properties as they will be added as a prefix for each prop.
// $sep is just a preference if you want to seperate nested paths other than dot.
function flatten(obj, roots = [], sep = ".") {
return Object.keys(obj).reduce(
(memo, prop) =>
Object.assign(
// create a new object
{},
// include previously returned object
memo,
Object.prototype.toString.call(obj[prop]) === "[object Object]"
? // keep working if value is an object
flatten(obj[prop], roots.concat([prop]), sep)
: // include current prop and value and prefix prop with the roots
{ [roots.concat([prop]).join(sep)]: obj[prop] }
),
{}
);
}

// Sliding window function to get possible combination groups of an array
function toNgrams(inputArray, size) {
return Array.from(
@@ -1,5 +1,5 @@
const EMB_REGEX = /<(?!l:|h:|c:)[^,> ]*>?/g;
const EMB_TRIGGER = () => TAC_CFG.useEmbeddings && tagword.match(EMB_REGEX);
const EMB_TRIGGER = () => TAC_CFG.useEmbeddings && (tagword.match(EMB_REGEX) || TAC_CFG.includeEmbeddingsInNormalResults);

class EmbeddingParser extends BaseTagParser {
parse() {
@@ -16,7 +16,12 @@ class LoraParser extends BaseTagParser {
// Add final results
let finalResults = [];
tempResults.forEach(t => {
let result = new AutocompleteResult(t[0].trim(), ResultType.lora)
const text = t[0].trim();
let lastDot = text.lastIndexOf(".") > -1 ? text.lastIndexOf(".") : text.length;
let lastSlash = text.lastIndexOf("/") > -1 ? text.lastIndexOf("/") : -1;
let name = text.substring(lastSlash + 1, lastDot);

let result = new AutocompleteResult(name, ResultType.lora)
result.meta = "Lora";
result.hash = t[1];
finalResults.push(result);
@@ -38,9 +43,15 @@ async function load() {
}
}

function sanitize(tagType, text) {
async function sanitize(tagType, text) {
if (tagType === ResultType.lora) {
return `<lora:${text}:${TAC_CFG.extraNetworksDefaultMultiplier}>`;
let multiplier = TAC_CFG.extraNetworksDefaultMultiplier;
let info = await fetchAPI(`tacapi/v1/lora-info/${text}`)
if (info && info["preferred weight"]) {
multiplier = info["preferred weight"];
}

return `<lora:${text}:${multiplier}>`;
}
return null;
}
@@ -16,7 +16,12 @@ class LycoParser extends BaseTagParser {
// Add final results
let finalResults = [];
tempResults.forEach(t => {
let result = new AutocompleteResult(t[0].trim(), ResultType.lyco)
const text = t[0].trim();
let lastDot = text.lastIndexOf(".") > -1 ? text.lastIndexOf(".") : text.length;
let lastSlash = text.lastIndexOf("/") > -1 ? text.lastIndexOf("/") : -1;
let name = text.substring(lastSlash + 1, lastDot);

let result = new AutocompleteResult(name, ResultType.lyco)
result.meta = "Lyco";
result.hash = t[1];
finalResults.push(result);
@@ -38,9 +43,15 @@ async function load() {
}
}

function sanitize(tagType, text) {
async function sanitize(tagType, text) {
if (tagType === ResultType.lyco) {
return `<lyco:${text}:${TAC_CFG.extraNetworksDefaultMultiplier}>`;
let multiplier = TAC_CFG.extraNetworksDefaultMultiplier;
let info = await fetchAPI(`tacapi/v1/lyco-info/${text}`)
if (info && info["preferred weight"]) {
multiplier = info["preferred weight"];
}

return `<lyco:${text}:${multiplier}>`;
}
return null;
}
@@ -7,7 +7,7 @@ class UmiParser extends BaseTagParser {
parse(textArea, prompt) {
// We are in a UMI yaml tag definition, parse further
let umiSubPrompts = [...prompt.matchAll(UMI_PROMPT_REGEX)];

let umiTags = [];
let umiTagsWithOperators = []

@@ -15,7 +15,7 @@ class UmiParser extends BaseTagParser {

umiSubPrompts.forEach(umiSubPrompt => {
umiTags = umiTags.concat([...umiSubPrompt[0].matchAll(UMI_TAG_REGEX)].map(x => x[1].toLowerCase()));

const start = umiSubPrompt.index;
const end = umiSubPrompt.index + umiSubPrompt[0].length;
if (textArea.selectionStart >= start && textArea.selectionStart <= end) {
@@ -74,7 +74,7 @@ class UmiParser extends BaseTagParser {
//console.log({ matches })

const filteredWildcards = (tagword) => {
const wildcards = yamlWildcards.filter(x => {
const wildcards = umiWildcards.filter(x => {
let tags = x[1];
const matchesNeg =
matches.negative.length === 0
@@ -113,7 +113,7 @@ class UmiParser extends BaseTagParser {
|| !matches.all.includes(x[0])
);
}

if (umiTags.length > 0) {
// Get difference for subprompt
let tagCountChange = umiTags.length - umiPreviousTags.length;
@@ -144,7 +144,7 @@ class UmiParser extends BaseTagParser {
// Add final results
let finalResults = [];
tempResults.forEach(t => {
let result = new AutocompleteResult(t[0].trim(), ResultType.yamlWildcard)
let result = new AutocompleteResult(t[0].trim(), ResultType.umiWildcard)
result.count = t[1];
finalResults.push(result);
});
@@ -152,26 +152,26 @@ class UmiParser extends BaseTagParser {
return finalResults;
} else if (showAll) {
let filteredWildcardsSorted = filteredWildcards("");

// Add final results
let finalResults = [];
filteredWildcardsSorted.forEach(t => {
let result = new AutocompleteResult(t[0].trim(), ResultType.yamlWildcard)
let result = new AutocompleteResult(t[0].trim(), ResultType.umiWildcard)
result.count = t[1];
finalResults.push(result);
});

originalTagword = tagword;
tagword = "";
return finalResults;
}
} else {
let filteredWildcardsSorted = filteredWildcards("");

// Add final results
let finalResults = [];
filteredWildcardsSorted.forEach(t => {
let result = new AutocompleteResult(t[0].trim(), ResultType.yamlWildcard)
let result = new AutocompleteResult(t[0].trim(), ResultType.umiWildcard)
result.count = t[1];
finalResults.push(result);
});
@@ -184,8 +184,8 @@ class UmiParser extends BaseTagParser {
}

function updateUmiTags( tagType, sanitizedText, newPrompt, textArea) {
// If it was a yaml wildcard, also update the umiPreviousTags
if (tagType === ResultType.yamlWildcard && originalTagword.length > 0) {
// If it was a umi wildcard, also update the umiPreviousTags
if (tagType === ResultType.umiWildcard && originalTagword.length > 0) {
let umiSubPrompts = [...newPrompt.matchAll(UMI_PROMPT_REGEX)];

let umiTags = [];
@@ -203,11 +203,11 @@ function updateUmiTags( tagType, sanitizedText, newPrompt, textArea) {
}

async function load() {
if (yamlWildcards.length === 0) {
if (umiWildcards.length === 0) {
try {
let yamlTags = (await readFile(`${tagBasePath}/temp/wcet.txt`)).split("\n");
let umiTags = (await readFile(`${tagBasePath}/temp/umi_tags.txt`)).split("\n");
// Split into tag, count pairs
yamlWildcards = yamlTags.map(x => x
umiWildcards = umiTags.map(x => x
.trim()
.split(","))
.map(([i, ...rest]) => [
@@ -218,14 +218,14 @@ async function load() {
}, {}),
]);
} catch (e) {
console.error("Error loading yaml wildcards: " + e);
console.error("Error loading umi wildcards: " + e);
}
}
}

function sanitize(tagType, text) {
// Replace underscores only if the yaml tag is not using them
if (tagType === ResultType.yamlWildcard && !yamlWildcards.includes(text)) {
// Replace underscores only if the umi tag is not using them
if (tagType === ResultType.umiWildcard && !umiWildcards.includes(text)) {
return text.replaceAll("_", " ");
}
return null;
@@ -13,12 +13,40 @@ class WildcardParser extends BaseTagParser {
let wcWord = wcMatch[0][2];

// Look in normal wildcard files
let wcFound = wildcardFiles.find(x => x[1].toLowerCase() === wcFile);
let wcFound = wildcardFiles.filter(x => x[1].toLowerCase() === wcFile);
if (wcFound.length === 0) wcFound = null;
// Use found wildcard file or look in external wildcard files
let wcPair = wcFound || wildcardExtFiles.find(x => x[1].toLowerCase() === wcFile);
let wcPairs = wcFound || wildcardExtFiles.filter(x => x[1].toLowerCase() === wcFile);

let wildcards = (await readFile(`${wcPair[0]}/${wcPair[1]}.txt`)).split("\n")
.filter(x => x.trim().length > 0 && !x.startsWith('#')); // Remove empty lines and comments
if (!wcPairs) return [];

let wildcards = [];
for (let i = 0; i < wcPairs.length; i++) {
const basePath = wcPairs[i][0];
const fileName = wcPairs[i][1];
if (!basePath || !fileName) return;

// YAML wildcards are already loaded as json, so we can get the values directly.
// basePath is the name of the file in this case, and fileName the key
if (basePath.endsWith(".yaml")) {
const getDescendantProp = (obj, desc) => {
const arr = desc.split("/");
while (arr.length) {
obj = obj[arr.shift()];
}
return obj;
}
wildcards = wildcards.concat(getDescendantProp(yamlWildcards[basePath], fileName));
} else {
const fileContent = (await fetchAPI(`tacapi/v1/wildcard-contents?basepath=${basePath}&filename=${fileName}.txt`, false))
.split("\n")
.filter(x => x.trim().length > 0 && !x.startsWith('#')); // Remove empty lines and comments
wildcards = wildcards.concat(fileContent);
}
}

if (TAC_CFG.sortWildcardResults)
wildcards.sort((a, b) => a.localeCompare(b));

let finalResults = [];
let tempResults = wildcards.filter(x => (wcWord !== null && wcWord.length > 0) ? x.toLowerCase().includes(wcWord) : x) // Filter by tagword
@@ -44,13 +72,27 @@ class WildcardFileParser extends BaseTagParser {
}

let finalResults = [];
const alreadyAdded = new Map();
// Get final results
tempResults.forEach(wcFile => {
let result = new AutocompleteResult(wcFile[1].trim(), ResultType.wildcardFile);
result.meta = "Wildcard file";
// Skip duplicate entries incase multiple files have the same name or yaml category
if (alreadyAdded.has(wcFile[1])) return;

let result = null;
if (wcFile[0].endsWith(".yaml")) {
result = new AutocompleteResult(wcFile[1].trim(), ResultType.yamlWildcard);
result.meta = "YAML wildcard collection";
} else {
result = new AutocompleteResult(wcFile[1].trim(), ResultType.wildcardFile);
result.meta = "Wildcard file";
}

finalResults.push(result);
alreadyAdded.set(wcFile[1], true);
});

finalResults.sort((a, b) => a.text.localeCompare(b.text));

return finalResults;
}
}
@@ -87,6 +129,17 @@ async function load() {
wcExtFile = wcExtFile.map(x => [base, x]);
wildcardExtFiles.push(...wcExtFile);
}

// Load the yaml wildcard json file and append it as a wildcard file, appending each key as a path component until we reach the end
yamlWildcards = await readFile(`${tagBasePath}/temp/wc_yaml.json`, true);

// Append each key as a path component until we reach a leaf
Object.keys(yamlWildcards).forEach(file => {
const flattened = flatten(yamlWildcards[file], [], "/");
Object.keys(flattened).forEach(key => {
wildcardExtFiles.push([file, key]);
});
});
} catch (e) {
console.error("Error loading wildcards: " + e);
}
@@ -94,7 +147,7 @@ async function load() {
}

function sanitize(tagType, text) {
if (tagType === ResultType.wildcardFile) {
if (tagType === ResultType.wildcardFile || tagType === ResultType.yamlWildcard) {
return `__${text}__`;
} else if (tagType === ResultType.wildcardTag) {
return text.replace(/^.*?: /g, "");
@@ -104,9 +157,8 @@ function sanitize(tagType, text) {

function keepOpenIfWildcard(tagType, sanitizedText, newPrompt, textArea) {
// If it's a wildcard, we want to keep the results open so the user can select another wildcard
if (tagType === ResultType.wildcardFile) {
if (tagType === ResultType.wildcardFile || tagType === ResultType.yamlWildcard) {
hideBlocked = true;
autocomplete(textArea, newPrompt, sanitizedText);
setTimeout(() => { hideBlocked = false; }, 450);
return true;
}
@@ -25,18 +25,36 @@ const autocompleteCSS = `
background-color: transparent;
min-width: fit-content;
}
.autocompleteResults {
.autocompleteParent {
display: flex;
position: absolute;
z-index: 999;
max-width: calc(100% - 1.5rem);
margin: 5px 0 0 0;
}
.autocompleteResults {
background-color: var(--results-bg) !important;
border: var(--results-border-width) solid var(--results-border-color) !important;
border-radius: 12px !important;
height: fit-content;
flex-basis: fit-content;
flex-shrink: 0;
overflow-y: var(--results-overflow-y);
overflow-x: hidden;
word-break: break-word;
}
.sideInfo {
display: none;
position: relative;
margin-left: 10px;
height: 18rem;
max-width: 16rem;
}
.sideInfo > img {
object-fit: cover;
height: 100%;
width: 100%;
}
.autocompleteResultsList > li:nth-child(odd) {
background-color: var(--results-bg-odd);
}
@@ -56,6 +74,7 @@ const autocompleteCSS = `
}
.acListItem {
white-space: break-spaces;
min-width: 100px;
}
.acMetaText {
position: relative;
@@ -190,11 +209,14 @@ async function syncOptions() {
resultStepLength: opts["tac_resultStepLength"],
delayTime: opts["tac_delayTime"],
useWildcards: opts["tac_useWildcards"],
sortWildcardResults: opts["tac_sortWildcardResults"],
useEmbeddings: opts["tac_useEmbeddings"],
includeEmbeddingsInNormalResults: opts["tac_includeEmbeddingsInNormalResults"],
useHypernetworks: opts["tac_useHypernetworks"],
useLoras: opts["tac_useLoras"],
useLycos: opts["tac_useLycos"],
showWikiLinks: opts["tac_showWikiLinks"],
showExtraNetworkPreviews: opts["tac_showExtraNetworkPreviews"],
// Insertion related settings
replaceUnderscores: opts["tac_replaceUnderscores"],
escapeParentheses: opts["tac_escapeParentheses"],
@@ -203,6 +225,7 @@ async function syncOptions() {
alwaysSpaceAtEnd: opts["tac_alwaysSpaceAtEnd"],
wildcardCompletionMode: opts["tac_wildcardCompletionMode"],
modelKeywordCompletion: opts["tac_modelKeywordCompletion"],
modelKeywordLocation: opts["tac_modelKeywordLocation"],
// Alias settings
alias: {
searchByAlias: opts["tac_alias.searchByAlias"],
@@ -269,50 +292,65 @@ async function syncOptions() {

// Create the result list div and necessary styling
function createResultsDiv(textArea) {
let parentDiv = document.createElement("div");
let resultsDiv = document.createElement("div");
let resultsList = document.createElement("ul");
let sideDiv = document.createElement("div");
let sideDivImg = document.createElement("img");

let textAreaId = getTextAreaIdentifier(textArea);
let typeClass = textAreaId.replaceAll(".", " ");

parentDiv.setAttribute("class", `autocompleteParent${typeClass}`);

resultsDiv.style.maxHeight = `${TAC_CFG.maxResults * 50}px`;
resultsDiv.setAttribute("class", `autocompleteResults ${typeClass} notranslate`);
resultsDiv.setAttribute("class", `autocompleteResults${typeClass} notranslate`);
resultsDiv.setAttribute("translate", "no");
resultsList.setAttribute("class", "autocompleteResultsList");
resultsDiv.appendChild(resultsList);

return resultsDiv;
sideDiv.setAttribute("class", `autocompleteResults${typeClass} sideInfo`);
sideDiv.appendChild(sideDivImg);

parentDiv.appendChild(resultsDiv);
parentDiv.appendChild(sideDiv);

return parentDiv;
}

// Show or hide the results div
function isVisible(textArea) {
let textAreaId = getTextAreaIdentifier(textArea);
let resultsDiv = gradioApp().querySelector('.autocompleteResults' + textAreaId);
return resultsDiv.style.display === "block";
let parentDiv = gradioApp().querySelector('.autocompleteParent' + textAreaId);
return parentDiv.style.display === "flex";
}
function showResults(textArea) {
let textAreaId = getTextAreaIdentifier(textArea);
let resultsDiv = gradioApp().querySelector('.autocompleteResults' + textAreaId);
resultsDiv.style.display = "block";
let parentDiv = gradioApp().querySelector('.autocompleteParent' + textAreaId);
parentDiv.style.display = "flex";

if (TAC_CFG.slidingPopup) {
let caretPosition = getCaretCoordinates(textArea, textArea.selectionEnd).left;
let offset = Math.min(textArea.offsetLeft - textArea.scrollLeft + caretPosition, textArea.offsetWidth - resultsDiv.offsetWidth);

resultsDiv.style.left = `${offset}px`;
let offset = Math.min(textArea.offsetLeft - textArea.scrollLeft + caretPosition, textArea.offsetWidth - parentDiv.offsetWidth);

parentDiv.style.left = `${offset}px`;
} else {
if (resultsDiv.style.left)
resultsDiv.style.removeProperty("left");
if (parentDiv.style.left)
parentDiv.style.removeProperty("left");
}
// Reset here too to make absolutely sure the browser registers it
resultsDiv.scrollTop = 0;
parentDiv.scrollTop = 0;

// Ensure preview is hidden
let previewDiv = gradioApp().querySelector(`.autocompleteParent${textAreaId} .sideInfo`);
previewDiv.style.display = "none";
}
function hideResults(textArea) {
let textAreaId = getTextAreaIdentifier(textArea);
let resultsDiv = gradioApp().querySelector('.autocompleteResults' + textAreaId);

let resultsDiv = gradioApp().querySelector('.autocompleteParent' + textAreaId);

if (!resultsDiv) return;

resultsDiv.style.display = "none";
selectedTag = null;
}
@@ -322,12 +360,12 @@ function isEnabled() {
if (TAC_CFG.activeIn.global) {
// Skip check if the current model was not correctly detected, since it could wrongly disable the script otherwise
if (!currentModelName || !currentModelHash) return true;

let modelList = TAC_CFG.activeIn.modelList
.split(",")
.map(x => x.trim())
.filter(x => x.length > 0);

let shortHash = currentModelHash.substring(0, 10);
let modelNameWithoutHash = currentModelName.replace(/\[.*\]$/g, "").trim();
if (TAC_CFG.activeIn.modelListMode.toLowerCase() === "blacklist") {
@@ -375,7 +413,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
}
}

if (tagType === ResultType.wildcardFile
if ((tagType === ResultType.wildcardFile || tagType === ResultType.yamlWildcard)
&& tabCompletedWithoutChoice
&& TAC_CFG.wildcardCompletionMode !== "Always fully"
&& sanitizedText.includes("/")) {
@@ -402,9 +440,11 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
}
}
// Don't cut off the __ at the end if it is already the full path
if (firstDifference < longestResult) {
if (firstDifference > 0 && firstDifference < longestResult) {
// +2 because the sanitized text already has the __ at the start but the matched text doesn't
sanitizedText = sanitizedText.substring(0, firstDifference + 2);
} else if (firstDifference === 0) {
sanitizedText = tagword;
}
}
}
@@ -420,7 +460,7 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout

var optionalSeparator = "";
let extraNetworkTypes = [ResultType.hypernetwork, ResultType.lora];
let noCommaTypes = [ResultType.wildcardFile, ResultType.yamlWildcard].concat(extraNetworkTypes);
let noCommaTypes = [ResultType.wildcardFile, ResultType.yamlWildcard, ResultType.umiWildcard].concat(extraNetworkTypes);
if (!noCommaTypes.includes(tagType)) {
// Append comma if enabled and not already present
let beforeComma = surrounding.match(new RegExp(`${escapeRegExp(tagword)}[,:]`, "i")) !== null;
@@ -445,9 +485,18 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout

// Add lora/lyco keywords if enabled and found
let keywordsLength = 0;
if (TAC_CFG.modelKeywordCompletion !== "Never" && modelKeywordPath.length > 0 && (tagType === ResultType.lora || tagType === ResultType.lyco)) {
if (result.hash && result.hash !== "NOFILE" && result.hash.length > 0) {
let keywords = null;

if (TAC_CFG.modelKeywordCompletion !== "Never" && (tagType === ResultType.lora || tagType === ResultType.lyco)) {
let keywords = null;
// Check built-in activation words first
if (tagType === ResultType.lora || tagType === ResultType.lyco) {
let info = await fetchAPI(`tacapi/v1/lora-info/${result.text}`)
if (info && info["activation text"]) {
keywords = info["activation text"];
}
}

if (!keywords && modelKeywordPath.length > 0 && result.hash && result.hash !== "NOFILE" && result.hash.length > 0) {
let nameDict = modelKeywordDict.get(result.hash);
let names = [result.text + ".safetensors", result.text + ".pt", result.text + ".ckpt"];

@@ -459,25 +508,33 @@ async function insertTextAtCursor(textArea, result, tagword, tabCompletedWithout
keywords = nameDict.get(name);
}
});

if (!found)
keywords = nameDict.get("none");
}
}

if (keywords && keywords.length > 0) {
textBeforeKeywordInsertion = newPrompt;

if (keywords && keywords.length > 0) {
textBeforeKeywordInsertion = newPrompt;

if (TAC_CFG.modelKeywordLocation === "Start of prompt")
newPrompt = `${keywords}, ${newPrompt}`; // Insert keywords

textAfterKeywordInsertion = newPrompt;
keywordInsertionUndone = false;
setTimeout(() => lastEditWasKeywordInsertion = true, 200)

keywordsLength = keywords.length + 2; // +2 for the comma and space
else if (TAC_CFG.modelKeywordLocation === "End of prompt")
newPrompt = `${newPrompt}, ${keywords}`; // Insert keywords
else {
let keywordStart = prompt[editStart - 1] === " " ? editStart - 1 : editStart;
newPrompt = prompt.substring(0, keywordStart) + `, ${keywords} ${insert}` + prompt.substring(editEnd);
}

textAfterKeywordInsertion = newPrompt;
keywordInsertionUndone = false;
setTimeout(() => lastEditWasKeywordInsertion = true, 200)

keywordsLength = keywords.length + 2; // +2 for the comma and space
}
}

// Insert into prompt textbox and reposition cursor
textArea.value = newPrompt;
textArea.selectionStart = afterInsertCursorPos + optionalSeparator.length + keywordsLength;
@@ -583,7 +640,8 @@ function addResultsToList(textArea, results, tagword, resetList) {
// Print search term bolded in result
itemText.innerHTML = displayText.replace(tagword, `<b>${tagword}</b>`);

if (result.type === ResultType.wildcardFile && itemText.innerHTML.includes("/")) {
const splitTypes = [ResultType.wildcardFile, ResultType.yamlWildcard]
if (splitTypes.includes(result.type) && itemText.innerHTML.includes("/")) {
let parts = itemText.innerHTML.split("/");
let lastPart = parts[parts.length - 1];
parts = parts.slice(0, parts.length - 1);
@@ -603,7 +661,12 @@ function addResultsToList(textArea, results, tagword, resetList) {
// Only use alias result if it is one
if (displayText.includes("➝"))
linkPart = displayText.split(" ➝ ")[1];

// Remove any trailing translations
if (linkPart.includes("[")) {
linkPart = linkPart.split("[")[0]
}

// Set link based on selected file
let tagFileNameLower = tagFileName.toLowerCase();
if (tagFileNameLower.startsWith("danbooru")) {
@@ -611,7 +674,7 @@ function addResultsToList(textArea, results, tagword, resetList) {
} else if (tagFileNameLower.startsWith("e621")) {
wikiLink.href = `https://e621.net/wiki_pages/${linkPart}`;
}

wikiLink.target = "_blank";
flexDiv.appendChild(wikiLink);
}
@@ -664,7 +727,7 @@ function addResultsToList(textArea, results, tagword, resetList) {
else if (result.meta.startsWith("v2"))
itemText.classList.add("acEmbeddingV2");
}

flexDiv.appendChild(metaDiv);
}

@@ -682,7 +745,7 @@ function addResultsToList(textArea, results, tagword, resetList) {
}
}

function updateSelectionStyle(textArea, newIndex, oldIndex) {
async function updateSelectionStyle(textArea, newIndex, oldIndex) {
let textAreaId = getTextAreaIdentifier(textArea);
let resultDiv = gradioApp().querySelector('.autocompleteResults' + textAreaId);
let resultsList = resultDiv.querySelector('ul');
@@ -694,13 +757,52 @@ function updateSelectionStyle(textArea, newIndex, oldIndex) {

// make it safer
if (newIndex !== null) {
items[newIndex].classList.add('selected');
let selected = items[newIndex];
selected.classList.add('selected');

// Set scrolltop to selected item
resultDiv.scrollTop = selected.offsetTop - resultDiv.offsetTop;
}

// Set scrolltop to selected item if we are showing more than max results
if (items.length > TAC_CFG.maxResults) {
// Show preview if enabled and the selected type supports it
if (newIndex !== null) {
let selected = items[newIndex];
resultDiv.scrollTop = selected.offsetTop - resultDiv.offsetTop;
let previewTypes = ["v1 Embedding", "v2 Embedding", "Hypernetwork", "Lora", "Lyco"];
let selectedType = selected.querySelector(".acMetaText").innerText;
let selectedFilename = selected.querySelector(".acListItem").innerText;

let previewDiv = gradioApp().querySelector(`.autocompleteParent${textAreaId} .sideInfo`);

if (TAC_CFG.showExtraNetworkPreviews && previewTypes.includes(selectedType)) {
let shorthandType = "";
switch (selectedType) {
case "v1 Embedding":
case "v2 Embedding":
shorthandType = "embed";
break;
case "Hypernetwork":
shorthandType = "hyper";
break;
case "Lora":
shorthandType = "lora";
break;
case "Lyco":
shorthandType = "lyco";
break;
}

let img = previewDiv.querySelector("img");

let url = await getExtraNetworkPreviewURL(selectedFilename, shorthandType);
if (url) {
img.src = url;
previewDiv.style.display = "block";
} else {
previewDiv.style.display = "none";
}
} else {
previewDiv.style.display = "none";
}
}
}

@@ -716,7 +818,7 @@ function updateRuby(textArea, prompt) {
ruby.setAttribute("class", `acRuby${typeClass} notranslate`);
textArea.parentNode.appendChild(ruby);
}

ruby.innerText = prompt;

let bracketEscapedPrompt = prompt.replaceAll("\\(", "$").replaceAll("\\)", "%");
@@ -734,9 +836,9 @@ function updateRuby(textArea, prompt) {
.replaceAll(" ", "_")
.replaceAll("\\(", "(")
.replaceAll("\\)", ")");

const translation = translations?.get(tag) || translations?.get(unsanitizedTag);

let escapedTag = escapeRegExp(tag);
return { tag, escapedTag, translation };
}
@@ -752,14 +854,14 @@ function updateRuby(textArea, prompt) {
// First try to find direct matches
[...rubyTags].forEach(tag => {
let tuple = prepareTag(tag);

if (tuple.translation) {
html = replaceOccurences(html, tuple);
} else {
let subTags = tuple.tag.split(" ").filter(x => x.trim().length > 0);
// Return if there is only one word
if (subTags.length === 1) return;

let subHtml = tag.replaceAll("$", "\\(").replaceAll("%", "\\)");

let translateNgram = (windows) => {
@@ -774,14 +876,14 @@ function updateRuby(textArea, prompt) {
}
});
}

// Perform n-gram sliding window search
translateNgram(toNgrams(subTags, 3));
translateNgram(toNgrams(subTags, 2));
translateNgram(toNgrams(subTags, 1));

let escapedTag = escapeRegExp(tuple.tag);

let searchRegex = new RegExp(`(?<!<ruby>)(?:\\b)${escapedTag}(?:\\b|$|(?=[,|: \\t\\n\\r]))(?!<rt>)`, "g");
html = html.replaceAll(searchRegex, subHtml);
}
@@ -893,6 +995,7 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
}

results = [];
resultCountBeforeNormalTags = 0;
tagword = tagword.toLowerCase().replace(/[\n\r]/g, "");

// Process all parsers
@@ -932,7 +1035,13 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
});
}
}
} else { // Else search the normal tag list
}
// Else search the normal tag list
if (!resultCandidates || resultCandidates.length === 0
|| (TAC_CFG.includeEmbeddingsInNormalResults && !(tagword.startsWith("<") || tagword.startsWith("*<")))
) {
resultCountBeforeNormalTags = results.length;

// Create escaped search regex with support for * as a start placeholder
let searchRegex;
if (tagword.startsWith("*")) {
@@ -947,7 +1056,7 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
let aliasFilter = (x) => x[3] && x[3].toLowerCase().search(searchRegex) > -1;
let translationFilter = (x) => (translations.has(x[0]) && translations.get(x[0]).toLowerCase().search(searchRegex) > -1)
|| x[3] && x[3].split(",").some(y => translations.has(y) && translations.get(y).toLowerCase().search(searchRegex) > -1);

let fil;
if (TAC_CFG.alias.searchByAlias && TAC_CFG.translation.searchByTranslation)
fil = (x) => baseFilter(x) || aliasFilter(x) || translationFilter(x);
@@ -985,10 +1094,10 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
results = results.concat(extraResults);
}
}

// Slice if the user has set a max result count
if (!TAC_CFG.showAllResults) {
results = results.slice(0, TAC_CFG.maxResults);
results = results.slice(0, TAC_CFG.maxResults + resultCountBeforeNormalTags);
}
}

@@ -1006,7 +1115,7 @@ async function autocomplete(textArea, prompt, fixedTag = null) {
function navigateInList(textArea, event) {
// Return if the function is deactivated in the UI or the current model is excluded due to white/blacklist settings
if (!isEnabled()) return;

let keys = TAC_CFG.keymap;

// Close window if Home or End is pressed while not a keybinding, since it would break completion on leaving the original tag
@@ -1022,7 +1131,7 @@ function navigateInList(textArea, event) {
if (!validKeys.includes(event.key)) return;
if (!isVisible(textArea)) return
// Return if ctrl key is pressed to not interfere with weight editing shortcut
if (event.ctrlKey || event.altKey) return;
if (event.ctrlKey || event.altKey || event.shiftKey || event.metaKey) return;

oldSelectedTag = selectedTag;
@@ -1056,10 +1165,25 @@ function navigateInList(textArea, event) {
}
break;
case keys["JumpToStart"]:
selectedTag = 0;
if (TAC_CFG.includeEmbeddingsInNormalResults &&
selectedTag > resultCountBeforeNormalTags &&
resultCountBeforeNormalTags > 0
) {
selectedTag = resultCountBeforeNormalTags;
} else {
selectedTag = 0;
}
break;
case keys["JumpToEnd"]:
selectedTag = resultCount - 1;
// Jump to the end of the list, or the end of embeddings if they are included in the normal results
if (TAC_CFG.includeEmbeddingsInNormalResults &&
selectedTag < resultCountBeforeNormalTags &&
resultCountBeforeNormalTags > 0
) {
selectedTag = Math.min(resultCountBeforeNormalTags, resultCount - 1);
} else {
selectedTag = resultCount - 1;
}
break;
case keys["ChooseSelected"]:
if (selectedTag !== null) {
@@ -1100,7 +1224,7 @@ async function refreshTacTempFiles() {
setTimeout(async () => {
wildcardFiles = [];
wildcardExtFiles = [];
yamlWildcards = [];
umiWildcards = [];
embeddings = [];
hypernetworks = [];
loras = [];
@@ -1213,8 +1337,8 @@ async function setup() {
// Not found, we're on a page without prompt textareas
if (textAreas.every(v => v === null || v === undefined)) return;
// Already added or unnecessary to add
if (gradioApp().querySelector('.autocompleteResults.p')) {
if (gradioApp().querySelector('.autocompleteResults.n') || !TAC_CFG.activeIn.negativePrompts) {
if (gradioApp().querySelector('.autocompleteParent.p')) {
if (gradioApp().querySelector('.autocompleteParent.n') || !TAC_CFG.activeIn.negativePrompts) {
return;
}
} else if (!TAC_CFG.activeIn.txt2img && !TAC_CFG.activeIn.img2img) {
@@ -1228,7 +1352,7 @@ async function setup() {
let mode = (document.querySelector(".dark") || gradioApp().querySelector(".dark")) ? 0 : 1;
// Check if we are on webkit
let browser = navigator.userAgent.toLowerCase().indexOf('firefox') > -1 ? "firefox" : "other";

let css = autocompleteCSS;
// Replace vars with actual values (can't use actual css vars because of the way we inject the css)
Object.keys(styleColors).forEach((key) => {
@@ -1237,7 +1361,7 @@ async function setup() {
Object.keys(browserVars).forEach((key) => {
css = css.replaceAll(`var(${key})`, browserVars[key][browser]);
})

if (acStyle.styleSheet) {
acStyle.styleSheet.cssText = css;
} else {
@@ -28,9 +28,10 @@ def load_hash_cache():
def update_hash_cache():
global file_needs_update
if file_needs_update:
with open(known_hashes_file, "w", encoding="utf-8") as file:
with open(known_hashes_file, "w", encoding="utf-8", newline='') as file:
writer = csv.writer(file)
for name, (hash, mtime) in hash_dict.items():
file.write(f'"{name}",{hash},{mtime}\n')
writer.writerow([name, hash, mtime])

# Copy of the fast inaccurate hash function from the extension
@@ -75,6 +76,6 @@ def write_model_keyword_path():
return True
else:
print(
"Tag Autocomplete: Could not locate model-keyword extension, LORA/LYCO trigger word completion will be unavailable."
"Tag Autocomplete: Could not locate model-keyword extension, Lora trigger word completion will be limited to those added through the extra networks menu."
)
return False
@@ -1,4 +1,5 @@
from pathlib import Path

from modules import scripts, shared

try:
@@ -37,6 +38,21 @@ except AttributeError:
def find_ext_wildcard_paths():
"""Returns the path to the extension wildcards folder"""
found = list(EXT_PATH.glob("*/wildcards/"))
# Try to find the wildcard path from the shared opts
try:
from modules.shared import opts
except ImportError: # likely not in an a1111 context
opts = None

# Append custom wildcard paths
custom_paths = [
getattr(shared.cmd_opts, "wildcards_dir", None), # Cmd arg from the wildcard extension
getattr(opts, "wildcard_dir", None), # Custom path from sd-dynamic-prompts
]
for path in [Path(p) for p in custom_paths if p is not None]:
if path.exists():
found.append(path)

return found
@@ -2,10 +2,14 @@
# to a temporary file to expose it to the javascript side

import glob
import json
import urllib.parse
from pathlib import Path

import gradio as gr
import yaml
from fastapi import FastAPI
from fastapi.responses import FileResponse, JSONResponse
from modules import script_callbacks, sd_hijack, shared

from scripts.model_keyword_support import (get_lora_simple_hash,
@@ -13,6 +17,12 @@ from scripts.model_keyword_support import (get_lora_simple_hash,
write_model_keyword_path)
from scripts.shared_paths import *

# Attempt to get embedding load function, using the same call as api.
try:
load_textual_inversion_embeddings = sd_hijack.model_hijack.embedding_db.load_textual_inversion_embeddings
except Exception as e: # Not supported.
load_textual_inversion_embeddings = lambda *args, **kwargs: None
print("Tag Autocomplete: Cannot reload embeddings instantly:", e)

def get_wildcards():
"""Returns a list of all wildcards. Works on nested folders."""
@@ -33,37 +43,73 @@ def get_ext_wildcards():

return wildcard_files

def is_umi_format(data):
"""Returns True if the YAML file is in UMI format."""
issue_found = False
for item in data:
if not (data[item] and 'Tags' in data[item] and isinstance(data[item]['Tags'], list)):
issue_found = True
break
return not issue_found

def get_ext_wildcard_tags():
def parse_umi_format(umi_tags, count, data):
for item in data:
umi_tags[count] = ','.join(data[item]['Tags'])
count += 1

def parse_dynamic_prompt_format(yaml_wildcards, data, path):
# Recurse subkeys, delete those without string lists as values
def recurse_dict(d: dict):
for key, value in d.copy().items():
if isinstance(value, dict):
recurse_dict(value)
elif not (isinstance(value, list) and all(isinstance(v, str) for v in value)):
del d[key]

recurse_dict(data)
# Add to yaml_wildcards
yaml_wildcards[path.name] = data

def get_yaml_wildcards():
"""Returns a list of all tags found in extension YAML files found under a Tags: key."""
wildcard_tags = {} # { tag: count }
yaml_files = []
for path in WILDCARD_EXT_PATHS:
yaml_files.extend(p for p in path.rglob("*.yml"))
yaml_files.extend(p for p in path.rglob("*.yaml"))

yaml_wildcards = {}

umi_tags = {} # { tag: count }
count = 0

for path in yaml_files:
try:
with open(path, encoding="utf8") as file:
data = yaml.safe_load(file)
if data:
for item in data:
if data[item] and 'Tags' in data[item] and isinstance(data[item]['Tags'], list):
wildcard_tags[count] = ','.join(data[item]['Tags'])
count += 1
else:
print('Issue with tags found in ' + path.name + ' at item ' + item)
if (data):
if (is_umi_format(data)):
parse_umi_format(umi_tags, count, data)
else:
parse_dynamic_prompt_format(yaml_wildcards, data, path)
else:
print('No data found in ' + path.name)
except yaml.YAMLError:
print('Issue in parsing YAML file ' + path.name )
print('Issue in parsing YAML file ' + path.name)
continue

# Sort by count
sorted_tags = sorted(wildcard_tags.items(), key=lambda item: item[1], reverse=True)
output = []
for tag, count in sorted_tags:
output.append(f"{tag},{count}")
return output
umi_sorted = sorted(umi_tags.items(), key=lambda item: item[1], reverse=True)
umi_output = []
for tag, count in umi_sorted:
umi_output.append(f"{tag},{count}")

if (len(umi_output) > 0):
write_to_temp_file('umi_tags.txt', umi_output)

with open(TEMP_PATH.joinpath("wc_yaml.json"), "w", encoding="utf-8") as file:
json.dump(yaml_wildcards, file, ensure_ascii=False)

def get_embeddings(sd_model):
@@ -142,12 +188,11 @@ def get_lora():
valid_loras = [lf for lf in lora_paths if lf.suffix in {".safetensors", ".ckpt", ".pt"}]
hashes = {}
for l in valid_loras:
name = l.name[:l.name.rfind('.')]
name = l.relative_to(LORA_PATH).as_posix()
if model_keyword_installed:
hashes[name] = get_lora_simple_hash(l)
else:
hashes[name] = ""

# Sort
sorted_loras = dict(sorted(hashes.items()))
# Add hashes and return
@@ -159,14 +204,17 @@ def get_lyco():

# Get a list of all LyCORIS in the folder
lyco_paths = [Path(ly) for ly in glob.glob(LYCO_PATH.joinpath("**/*").as_posix(), recursive=True)]

# Get hashes
valid_lycos = [lyf for lyf in lyco_paths if lyf.suffix in {".safetensors", ".ckpt", ".pt"}]
hashes = {}
for ly in valid_lycos:
name = ly.name[:ly.name.rfind('.')]
hashes[name] = get_lora_simple_hash(ly)

name = ly.relative_to(LYCO_PATH).as_posix()
if model_keyword_installed:
hashes[name] = get_lora_simple_hash(ly)
else:
hashes[name] = ""

# Sort
sorted_lycos = dict(sorted(hashes.items()))
# Add hashes and return
@@ -221,7 +269,8 @@ if not TEMP_PATH.exists():
# even if no wildcards or embeddings are found
write_to_temp_file('wc.txt', [])
write_to_temp_file('wce.txt', [])
write_to_temp_file('wcet.txt', [])
write_to_temp_file('wc_yaml.json', [])
write_to_temp_file('umi_tags.txt', [])
write_to_temp_file('hyp.txt', [])
write_to_temp_file('lora.txt', [])
write_to_temp_file('lyco.txt', [])
@@ -235,6 +284,9 @@ if EMB_PATH.exists():
|
||||
script_callbacks.on_model_loaded(get_embeddings)
|
||||
|
||||
def refresh_temp_files():
|
||||
global WILDCARD_EXT_PATHS
|
||||
WILDCARD_EXT_PATHS = find_ext_wildcard_paths()
|
||||
load_textual_inversion_embeddings(force_reload = True) # Instant embedding reload.
|
||||
write_temp_files()
|
||||
get_embeddings(shared.sd_model)
|
||||
|
||||
@@ -250,10 +302,8 @@ def write_temp_files():
    wildcards_ext = get_ext_wildcards()
    if wildcards_ext:
        write_to_temp_file('wce.txt', wildcards_ext)
    # Write yaml extension wildcards to wcet.txt if found
    wildcards_yaml_ext = get_ext_wildcard_tags()
    if wildcards_yaml_ext:
        write_to_temp_file('wcet.txt', wildcards_yaml_ext)
    # Write yaml extension wildcards to umi_tags.txt and wc_yaml.json if found
    get_yaml_wildcards()

    if HYP_PATH.exists():
        hypernets = get_hypernetworks()
@@ -268,7 +318,7 @@ def write_temp_files():
        lora = get_lora()
        if lora:
            write_to_temp_file('lora.txt', lora)

    lyco_exists = LYCO_PATH is not None and LYCO_PATH.exists()
    if lyco_exists and not (lora_exists and LYCO_PATH.samefile(LORA_PATH)):
        lyco = get_lyco()
@@ -317,18 +367,22 @@ def on_ui_settings():
        "tac_resultStepLength": shared.OptionInfo(100, "How many results to load at once"),
        "tac_delayTime": shared.OptionInfo(100, "Time in ms to wait before triggering completion again").needs_restart(),
        "tac_useWildcards": shared.OptionInfo(True, "Search for wildcards"),
        "tac_sortWildcardResults": shared.OptionInfo(True, "Sort wildcard file contents alphabetically").info("If your wildcard files have a specific custom order, disable this to keep it"),
        "tac_useEmbeddings": shared.OptionInfo(True, "Search for embeddings"),
        "tac_includeEmbeddingsInNormalResults": shared.OptionInfo(False, "Include embeddings in normal tag results").info("The 'JumpTo...' keybinds (End & Home key by default) will select the first non-embedding result of their direction on the first press for quick navigation in longer lists."),
        "tac_useHypernetworks": shared.OptionInfo(True, "Search for hypernetworks"),
        "tac_useLoras": shared.OptionInfo(True, "Search for Loras"),
        "tac_useLycos": shared.OptionInfo(True, "Search for LyCORIS/LoHa"),
        "tac_showWikiLinks": shared.OptionInfo(False, "Show '?' next to tags, linking to its Danbooru or e621 wiki page").info("Warning: This is an external site and very likely contains NSFW examples!"),
        "tac_showExtraNetworkPreviews": shared.OptionInfo(True, "Show preview thumbnails for extra networks if available"),
        # Insertion related settings
        "tac_replaceUnderscores": shared.OptionInfo(True, "Replace underscores with spaces on insertion"),
        "tac_escapeParentheses": shared.OptionInfo(True, "Escape parentheses on insertion"),
        "tac_appendComma": shared.OptionInfo(True, "Append comma on tag autocompletion"),
        "tac_appendSpace": shared.OptionInfo(True, "Append space on tag autocompletion").info("will append after comma if the above is enabled"),
        "tac_alwaysSpaceAtEnd": shared.OptionInfo(True, "Always append space if inserting at the end of the textbox").info("takes precedence over the regular space setting for that position"),
        "tac_modelKeywordCompletion": shared.OptionInfo("Never", "Try to add known trigger words for LORA/LyCO models", gr.Dropdown, lambda: {"interactive": model_keyword_installed, "choices": ["Never","Only user list","Always"]}).info("Requires the <a href=\"https://github.com/mix1009/model-keyword\" target=\"_blank\">model-keyword</a> extension to be installed, but will work with it disabled.").needs_restart(),
        "tac_modelKeywordCompletion": shared.OptionInfo("Never", "Try to add known trigger words for LORA/LyCO models", gr.Dropdown, lambda: {"choices": ["Never","Only user list","Always"]}).info("Will use & prefer the native activation keywords settable in the extra networks UI. Other functionality requires the <a href=\"https://github.com/mix1009/model-keyword\" target=\"_blank\">model-keyword</a> extension to be installed, but will work with it disabled.").needs_restart(),
        "tac_modelKeywordLocation": shared.OptionInfo("Start of prompt", "Where to insert the trigger keyword", gr.Dropdown, lambda: {"choices": ["Start of prompt","End of prompt","Before LORA/LyCO"]}).info("Only relevant if the above option is enabled"),
        "tac_wildcardCompletionMode": shared.OptionInfo("To next folder level", "How to complete nested wildcard paths", gr.Dropdown, lambda: {"choices": ["To next folder level","To first difference","Always fully"]}).info("e.g. \"hair/colours/light/...\""),
        # Alias settings
        "tac_alias.searchByAlias": shared.OptionInfo(True, "Search by alias"),
@@ -399,5 +453,81 @@ def on_ui_settings():
    shared.opts.add_option("tac_colormap", shared.OptionInfo(colorDefault, colorLabel, gr.Textbox, section=TAC_SECTION))

    shared.opts.add_option("tac_refreshTempFiles", shared.OptionInfo("Refresh TAC temp files", "Refresh internal temp files", gr.HTML, {}, refresh=refresh_temp_files, section=TAC_SECTION))

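Once registered, these options are exposed on `shared.opts` like any other webui setting. A minimal sketch of how another script might read one of them at runtime (only meaningful inside the webui's Python environment; the fallback default is an assumption):

```python
from modules import shared

# Read a Tag Autocomplete option at runtime; getattr keeps this safe
# in case the option has not been registered yet.
append_comma = getattr(shared.opts, "tac_appendComma", True)
print(append_comma)
```
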
script_callbacks.on_ui_settings(on_ui_settings)


def api_tac(_: gr.Blocks, app: FastAPI):
    async def get_json_info(base_path: Path, filename: str = None):
        if base_path is None or (not base_path.exists()):
            return JSONResponse({}, status_code=404)

        try:
            json_candidates = glob.glob(base_path.as_posix() + f"/**/{filename}.json", recursive=True)
            if json_candidates is not None and len(json_candidates) > 0:
                return FileResponse(json_candidates[0])
        except Exception as e:
            return JSONResponse({"error": e}, status_code=500)

    async def get_preview_thumbnail(base_path: Path, filename: str = None, blob: bool = False):
        if base_path is None or (not base_path.exists()):
            return JSONResponse({}, status_code=404)

        try:
            img_glob = glob.glob(base_path.as_posix() + f"/**/{filename}.*", recursive=True)
            img_candidates = [img for img in img_glob if Path(img).suffix in [".png", ".jpg", ".jpeg", ".webp", ".gif"]]
            if img_candidates is not None and len(img_candidates) > 0:
                if blob:
                    return FileResponse(img_candidates[0])
                else:
                    return JSONResponse({"url": urllib.parse.quote(img_candidates[0])})
        except Exception as e:
            return JSONResponse({"error": e}, status_code=500)

@app.get("/tacapi/v1/lora-info/{lora_name}")
|
||||
async def get_lora_info(lora_name):
|
||||
return await get_json_info(LORA_PATH, lora_name)
|
||||
|
||||
@app.get("/tacapi/v1/lyco-info/{lyco_name}")
|
||||
async def get_lyco_info(lyco_name):
|
||||
return await get_json_info(LYCO_PATH, lyco_name)
|
||||
|
||||
def get_path_for_type(type):
|
||||
if type == "lora":
|
||||
return LORA_PATH
|
||||
elif type == "lyco":
|
||||
return LYCO_PATH
|
||||
elif type == "hyper":
|
||||
return HYP_PATH
|
||||
elif type == "embed":
|
||||
return EMB_PATH
|
||||
else:
|
||||
return None
|
||||
|
||||
@app.get("/tacapi/v1/thumb-preview/{filename}")
|
||||
async def get_thumb_preview(filename, type):
|
||||
return await get_preview_thumbnail(get_path_for_type(type), filename, False)
|
||||
|
||||
@app.get("/tacapi/v1/thumb-preview-blob/{filename}")
|
||||
async def get_thumb_preview_blob(filename, type):
|
||||
return await get_preview_thumbnail(get_path_for_type(type), filename, True)
|
||||
|
||||
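Both preview routes take the network type as a query parameter rather than a path segment. A small client-side sketch, assuming the webui is reachable at the default 127.0.0.1:7860 and that a Lora named `myLora` actually exists; neither is guaranteed here:

```python
# Illustrative client call, not part of the extension.
import json
import urllib.request

url = "http://127.0.0.1:7860/tacapi/v1/thumb-preview/myLora?type=lora"
with urllib.request.urlopen(url) as resp:
    print(json.loads(resp.read()))  # e.g. {"url": "path/to/myLora.preview.png"}
```
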
@app.get("/tacapi/v1/wildcard-contents")
|
||||
async def get_wildcard_contents(basepath: str, filename: str):
|
||||
if basepath is None or basepath == "":
|
||||
return JSONResponse({}, status_code=404)
|
||||
|
||||
base = Path(basepath)
|
||||
if base is None or (not base.exists()):
|
||||
return JSONResponse({}, status_code=404)
|
||||
|
||||
try:
|
||||
wildcard_path = base.joinpath(filename)
|
||||
if wildcard_path.exists():
|
||||
return FileResponse(wildcard_path)
|
||||
else:
|
||||
return JSONResponse({}, status_code=404)
|
||||
except Exception as e:
|
||||
return JSONResponse({"error": e}, status_code=500)
|
||||
|
||||
|
||||
script_callbacks.on_app_started(api_tac)