optimization: initial scan speed (batching tags)

This commit is contained in:
bigcat88
2025-09-17 13:40:08 +03:00
parent 5b6810a2c6
commit 85ef08449d
4 changed files with 29 additions and 17 deletions

View File

@@ -6,6 +6,7 @@ from .projection import is_scalar, project_kv
from .tags import (
add_missing_tag_for_asset_id,
ensure_tags_exist,
insert_tags_from_batch,
remove_missing_tag_for_asset_id,
)
@@ -19,5 +20,6 @@ __all__ = [
"ensure_tags_exist",
"add_missing_tag_for_asset_id",
"remove_missing_tag_for_asset_id",
"insert_tags_from_batch",
"visible_owner_clause",
]

View File

@@ -88,3 +88,19 @@ async def remove_missing_tag_for_asset_id(
AssetInfoTag.tag_name == "missing",
)
)
async def insert_tags_from_batch(session: AsyncSession, *, tag_rows: list[dict]) -> None:
    """Bulk-insert asset/tag link rows, skipping rows that already exist.

    Issues a single dialect-specific ``INSERT ... ON CONFLICT DO NOTHING``
    keyed on ``(asset_info_id, tag_name)`` so duplicate links are ignored
    instead of raising an integrity error.

    Args:
        session: Active async session; the bound dialect (SQLite or
            PostgreSQL) selects which ``insert()`` construct is used.
        tag_rows: Row mappings to insert — presumably each carries
            ``asset_info_id`` and ``tag_name`` keys; verify against callers.
    """
    # Guard: .values([]) produces invalid SQL and raises in SQLAlchemy,
    # so a no-op batch must return early.
    if not tag_rows:
        return
    # Both dialect modules expose the same on_conflict_do_nothing() API;
    # only the insert() constructor differs, so branch on that alone.
    dialect_insert = d_sqlite.insert if session.bind.dialect.name == "sqlite" else d_pg.insert
    stmt = (
        dialect_insert(AssetInfoTag)
        .values(tag_rows)
        .on_conflict_do_nothing(index_elements=[AssetInfoTag.asset_info_id, AssetInfoTag.tag_name])
    )
    await session.execute(stmt)