[3d] add recording video support (#3749)

Co-authored-by: github-actions <github-actions@github.com>
This commit is contained in:
Terry Jia
2025-05-03 23:00:07 -04:00
committed by GitHub
parent 8ae36e2c8d
commit 77ac4a415c
15 changed files with 542 additions and 9 deletions

View File

@@ -57,6 +57,21 @@
@upload-texture="handleUploadTexture"
@export-model="handleExportModel"
/>
<div
v-if="showRecordingControls"
class="absolute top-12 right-2 z-20 pointer-events-auto"
>
<RecordingControls
:node="node"
:is-recording="isRecording"
:has-recording="hasRecording"
:recording-duration="recordingDuration"
@start-recording="handleStartRecording"
@stop-recording="handleStopRecording"
@export-recording="handleExportRecording"
@clear-recording="handleClearRecording"
/>
</div>
</div>
</template>
@@ -66,6 +81,7 @@ import { useI18n } from 'vue-i18n'
import Load3DControls from '@/components/load3d/Load3DControls.vue'
import Load3DScene from '@/components/load3d/Load3DScene.vue'
import RecordingControls from '@/components/load3d/controls/RecordingControls.vue'
import Load3dUtils from '@/extensions/core/load3d/Load3dUtils'
import {
CameraType,
@@ -101,6 +117,10 @@ const upDirection = ref<UpDirection>('original')
const materialMode = ref<MaterialMode>('original')
const edgeThreshold = ref(85)
const load3DSceneRef = ref<InstanceType<typeof Load3DScene> | null>(null)
// True while the Load3d scene is actively capturing video (set by the
// start/stop handlers below).
const isRecording = ref(false)
// True once a capture has been stopped and recorded data is available
// to export or clear.
const hasRecording = ref(false)
// Length of the last finished recording, in seconds (from
// load3d.getRecordingDuration()).
const recordingDuration = ref(0)
// Recording controls are hidden for preview-only nodes.
const showRecordingControls = ref(!inputSpec.isPreview)
const showPreviewButton = computed(() => {
return !type.includes('Preview')
@@ -118,6 +138,38 @@ const handleMouseLeave = () => {
}
}
// Begin capturing the 3D scene, then reflect the new state in the UI.
const handleStartRecording = async () => {
  const load3d = load3DSceneRef.value?.load3d
  if (!load3d) {
    return
  }
  await load3d.startRecording()
  isRecording.value = true
}
// Finish the capture and pull the measured duration back into the UI state.
const handleStopRecording = () => {
  const load3d = load3DSceneRef.value?.load3d
  if (!load3d) {
    return
  }
  load3d.stopRecording()
  isRecording.value = false
  hasRecording.value = true
  recordingDuration.value = load3d.getRecordingDuration()
}
// Download the last recording with a timestamped filename.
// The recorder produces WebM data (RecordingManager builds the blob with
// type 'video/webm'), so the file is named .webm; the previous .mp4
// extension mislabelled the container and broke playback in strict players.
const handleExportRecording = () => {
  if (load3DSceneRef.value?.load3d) {
    // ISO timestamp with ':' and '.' replaced, so it is filesystem-safe.
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-')
    const filename = `${timestamp}-scene-recording.webm`
    load3DSceneRef.value.load3d.exportRecording(filename)
  }
}
// Discard the captured data and reset the recording-related UI state.
const handleClearRecording = () => {
  const load3d = load3DSceneRef.value?.load3d
  if (!load3d) {
    return
  }
  load3d.clearRecording()
  hasRecording.value = false
  recordingDuration.value = 0
}
const switchCamera = () => {
cameraType.value =
cameraType.value === 'perspective' ? 'orthographic' : 'perspective'

View File

@@ -0,0 +1,176 @@
<template>
<div class="relative bg-gray-700 bg-opacity-30 rounded-lg">
<div class="flex flex-col gap-2">
<Button
class="p-button-rounded p-button-text"
@click="resizeNodeMatchOutput"
>
<i
v-tooltip.right="{
value: t('load3d.resizeNodeMatchOutput'),
showDelay: 300
}"
class="pi pi-window-maximize text-white text-lg"
/>
</Button>
<Button
class="p-button-rounded p-button-text"
:class="{ 'p-button-danger': isRecording }"
@click="toggleRecording"
>
<i
v-tooltip.right="{
value: isRecording
? t('load3d.stopRecording')
: t('load3d.startRecording'),
showDelay: 300
}"
:class="[
'pi',
isRecording ? 'pi-circle-fill' : 'pi-video',
'text-white text-lg'
]"
/>
</Button>
<Button
v-if="hasRecording && !isRecording"
class="p-button-rounded p-button-text"
@click="exportRecording"
>
<i
v-tooltip.right="{
value: t('load3d.exportRecording'),
showDelay: 300
}"
class="pi pi-download text-white text-lg"
/>
</Button>
<Button
v-if="hasRecording && !isRecording"
class="p-button-rounded p-button-text"
@click="clearRecording"
>
<i
v-tooltip.right="{
value: t('load3d.clearRecording'),
showDelay: 300
}"
class="pi pi-trash text-white text-lg"
/>
</Button>
<div
v-if="recordingDuration > 0 && !isRecording"
class="text-xs text-white text-center mt-1"
>
{{ formatDuration(recordingDuration) }}
</div>
</div>
</div>
</template>
<script setup lang="ts">
import { IWidget, LGraphNode } from '@comfyorg/litegraph'
import { Tooltip } from 'primevue'
import Button from 'primevue/button'
import { ref, watch } from 'vue'
import { t } from '@/i18n'
const vTooltip = Tooltip
// Props describing the recording state owned by the parent component.
const props = defineProps<{
  node: LGraphNode
  isRecording: boolean
  hasRecording: boolean
  recordingDuration: number
}>()

// Events emitted back to the parent, which drives the actual recorder.
const emit = defineEmits<{
  (e: 'startRecording'): void
  (e: 'stopRecording'): void
  (e: 'exportRecording'): void
  (e: 'clearRecording'): void
}>()

// Local mirrors of the props; the watchers below keep them in sync so the
// template and handlers can read plain refs.
const node = ref(props.node)
const isRecording = ref(props.isRecording)
const hasRecording = ref(props.hasRecording)
const recordingDuration = ref(props.recordingDuration)

watch(
  () => props.isRecording,
  (value) => (isRecording.value = value)
)

watch(
  () => props.hasRecording,
  (value) => (hasRecording.value = value)
)

watch(
  () => props.recordingDuration,
  (value) => (recordingDuration.value = value)
)
// Resize the node so the 3D scene area matches the aspect ratio of the
// configured output (the node's 'width'/'height' widgets).
// Fix: removed a leftover debug console.log that fired on every click.
const resizeNodeMatchOutput = () => {
  const outputWidth = node.value.widgets?.find(
    (w: IWidget) => w.name === 'width'
  )
  const outputHeight = node.value.widgets?.find(
    (w: IWidget) => w.name === 'height'
  )

  if (outputWidth && outputHeight && outputHeight.value && outputWidth.value) {
    const [oldWidth, oldHeight] = node.value.size
    const scene = node.value.widgets?.find((w: IWidget) => w.name === 'image')
    const sceneHeight = scene?.computedHeight

    if (sceneHeight) {
      // 20px presumably accounts for the node's horizontal padding around
      // the scene widget — NOTE(review): magic constant, confirm vs layout.
      const sceneWidth = oldWidth - 20
      const outputRatio = Number(outputHeight.value) / Number(outputWidth.value)
      const expectSceneHeight = sceneWidth * outputRatio

      // Grow/shrink the node by the delta between the desired and current
      // scene heights; width stays unchanged.
      node.value.setSize([
        oldWidth,
        oldHeight + (expectSceneHeight - sceneHeight)
      ])
    }
  }
}
// Flip between starting and stopping a capture based on the current state.
const toggleRecording = () => {
  if (!isRecording.value) {
    emit('startRecording')
    return
  }
  emit('stopRecording')
}
// Forward the export request to the parent, which owns the recorder.
function exportRecording() {
  emit('exportRecording')
}
// Forward the clear request to the parent, which owns the recorder.
function clearRecording() {
  emit('clearRecording')
}
// Format a duration in seconds as zero-padded "MM:SS".
const formatDuration = (seconds: number): string => {
  const pad = (v: number) => String(v).padStart(2, '0')
  const minutes = Math.floor(seconds / 60)
  const secs = Math.floor(seconds % 60)
  return `${pad(minutes)}:${pad(secs)}`
}
</script>

View File

@@ -215,6 +215,8 @@ useExtensionService().registerExtension({
sceneWidget.serializeValue = async () => {
node.properties['Camera Info'] = load3d.getCameraState()
load3d.stopRecording()
const {
scene: imageData,
mask: maskData,
@@ -234,13 +236,26 @@ useExtensionService().registerExtension({
load3d.handleResize()
return {
const returnVal = {
image: `threed/${data.name} [temp]`,
mask: `threed/${dataMask.name} [temp]`,
normal: `threed/${dataNormal.name} [temp]`,
lineart: `threed/${dataLineart.name} [temp]`,
camera_info: node.properties['Camera Info']
camera_info: node.properties['Camera Info'],
recording: ''
}
const recordingData = load3d.getRecordingData()
if (recordingData) {
const [recording] = await Promise.all([
Load3dUtils.uploadTempImage(recordingData, 'recording', 'mp4')
])
returnVal['recording'] = `threed/${recording.name} [temp]`
}
return returnVal
}
}
}

View File

@@ -12,6 +12,7 @@ import { ModelExporter } from './ModelExporter'
import { ModelManager } from './ModelManager'
import { NodeStorage } from './NodeStorage'
import { PreviewManager } from './PreviewManager'
import { RecordingManager } from './RecordingManager'
import { SceneManager } from './SceneManager'
import { ViewHelperManager } from './ViewHelperManager'
import {
@@ -38,6 +39,7 @@ class Load3d {
protected previewManager: PreviewManager
protected loaderManager: LoaderManager
protected modelManager: ModelManager
protected recordingManager: RecordingManager
STATUS_MOUSE_ON_NODE: boolean
STATUS_MOUSE_ON_SCENE: boolean
@@ -118,6 +120,11 @@ class Load3d {
this.loaderManager = new LoaderManager(this.modelManager, this.eventManager)
this.recordingManager = new RecordingManager(
this.sceneManager.scene,
this.renderer,
this.eventManager
)
this.sceneManager.init()
this.cameraManager.init()
this.controlsManager.init()
@@ -439,7 +446,39 @@ class Load3d {
return this.nodeStorage.loadNodeProperty(name, defaultValue)
}
remove(): void {
/**
 * Hides the view-helper gizmo (so it is not captured in the video), then
 * delegates the actual canvas capture to the RecordingManager.
 */
public async startRecording(): Promise<void> {
  this.viewHelperManager.visibleViewHelper(false)
  return this.recordingManager.startRecording()
}
/** Restores the view-helper gizmo and finalizes the capture. */
public stopRecording(): void {
  this.viewHelperManager.visibleViewHelper(true)
  this.recordingManager.stopRecording()
}
/**
 * NOTE(review): despite the name, this reports whether recorded data EXISTS
 * (it delegates to recordingManager.hasRecording()), not whether a capture
 * is currently in progress — confirm intent; callers tracking live
 * recording state may be misled.
 */
public isRecording(): boolean {
  return this.recordingManager.hasRecording()
}
/** Length of the last finished recording, in seconds. */
public getRecordingDuration(): number {
  return this.recordingManager.getRecordingDuration()
}
/**
 * Object URL for the recorded video, or null when nothing has been
 * recorded. The underlying blob is WebM (built by RecordingManager).
 */
public getRecordingData(): string | null {
  return this.recordingManager.getRecordingData()
}
/** Triggers a browser download of the recording; filename is optional. */
public exportRecording(filename?: string): void {
  this.recordingManager.exportRecording(filename)
}
/** Discards any recorded data and resets the duration. */
public clearRecording(): void {
  this.recordingManager.clearRecording()
}
public remove(): void {
if (this.animationFrameId !== null) {
cancelAnimationFrame(this.animationFrameId)
}
@@ -452,6 +491,7 @@ class Load3d {
this.previewManager.dispose()
this.loaderManager.dispose()
this.modelManager.dispose()
this.recordingManager.dispose()
this.renderer.dispose()
this.renderer.domElement.remove()

View File

@@ -4,10 +4,16 @@ import { app } from '@/scripts/app'
import { useToastStore } from '@/stores/toastStore'
class Load3dUtils {
static async uploadTempImage(imageData: string, prefix: string) {
static async uploadTempImage(
imageData: string,
prefix: string,
fileType: string = 'png'
) {
const blob = await fetch(imageData).then((r) => r.blob())
const name = `${prefix}_${Date.now()}.png`
const file = new File([blob], name)
const name = `${prefix}_${Date.now()}.${fileType}`
const file = new File([blob], name, {
type: fileType === 'mp4' ? 'video/mp4' : 'image/png'
})
const body = new FormData()
body.append('image', file)
@@ -20,7 +26,7 @@ class Load3dUtils {
})
if (resp.status !== 200) {
const err = `Error uploading temp image: ${resp.status} - ${resp.statusText}`
const err = `Error uploading temp file: ${resp.status} - ${resp.statusText}`
useToastStore().addAlert(err)
throw new Error(err)
}

View File

@@ -0,0 +1,183 @@
import * as THREE from 'three'
import { EventManagerInterface } from './interfaces'
/**
 * Captures the WebGL renderer's canvas to a WebM video via the MediaRecorder
 * API and manages the resulting recording (duration bookkeeping, export as a
 * download, clearing). A small green sprite is added to the scene as an
 * on-screen indicator while a capture is in progress.
 *
 * Lifecycle/status changes are reported through the EventManager:
 * 'recordingStarted', 'recordingStopped', 'exportingRecording',
 * 'recordingExported', 'recordingCleared', 'recordingError'.
 */
export class RecordingManager {
  private mediaRecorder: MediaRecorder | null = null
  private recordedChunks: Blob[] = []
  private isRecording: boolean = false
  private recordingStream: MediaStream | null = null
  private recordingIndicator: THREE.Sprite | null = null
  private scene: THREE.Scene
  private renderer: THREE.WebGLRenderer
  private eventManager: EventManagerInterface
  private recordingStartTime: number = 0
  private recordingDuration: number = 0
  private recordingCanvas: HTMLCanvasElement | null = null

  constructor(
    scene: THREE.Scene,
    renderer: THREE.WebGLRenderer,
    eventManager: EventManagerInterface
  ) {
    this.scene = scene
    this.renderer = renderer
    this.eventManager = eventManager

    this.setupRecordingIndicator()
  }

  /** Builds the (initially hidden) indicator sprite and adds it to the scene. */
  private setupRecordingIndicator(): void {
    const map = new THREE.TextureLoader().load(
      'data:image/svg+xml;base64,' +
        btoa(`<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 64 64">
<circle cx="32" cy="32" r="24" fill="#4CAF50" opacity="0.8" />
<circle cx="32" cy="32" r="16" fill="#2E7D32" opacity="0.8" />
</svg>`)
    )

    const material = new THREE.SpriteMaterial({
      map: map,
      transparent: true,
      // Render on top of scene geometry.
      depthTest: false,
      depthWrite: false
    })

    this.recordingIndicator = new THREE.Sprite(material)
    this.recordingIndicator.scale.set(0.5, 0.5, 0.5)
    this.recordingIndicator.position.set(-0.8, 0.8, 0)
    this.recordingIndicator.visible = false

    this.scene.add(this.recordingIndicator)
  }

  /**
   * Starts capturing the renderer canvas at 30 fps. No-op if already
   * recording. Emits 'recordingStarted' on success or 'recordingError' on
   * failure (the error is also logged).
   */
  public async startRecording(): Promise<void> {
    if (this.isRecording) {
      return
    }

    try {
      this.recordingCanvas = this.renderer.domElement
      this.recordingStream = this.recordingCanvas.captureStream(30)

      if (!this.recordingStream) {
        throw new Error('Failed to capture stream from canvas')
      }

      // Prefer VP9 but fall back to the browser's default WebM codec when
      // VP9 is unsupported, instead of throwing in the MediaRecorder ctor.
      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
        ? 'video/webm;codecs=vp9'
        : 'video/webm'

      this.mediaRecorder = new MediaRecorder(this.recordingStream, {
        mimeType,
        videoBitsPerSecond: 5000000
      })

      this.recordedChunks = []

      this.mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          this.recordedChunks.push(event.data)
        }
      }

      this.mediaRecorder.onstop = () => {
        // Guard instead of a non-null assertion: dispose() may have removed
        // the indicator before this async callback fires.
        if (this.recordingIndicator) {
          this.recordingIndicator.visible = false
        }
        this.isRecording = false
        this.recordingStream = null

        this.eventManager.emitEvent('recordingStopped', {
          duration: this.recordingDuration,
          hasRecording: this.recordedChunks.length > 0
        })
      }

      if (this.recordingIndicator) {
        this.recordingIndicator.visible = true
      }

      // Collect data in 100 ms chunks so little is lost if capture aborts.
      this.mediaRecorder.start(100)
      this.isRecording = true
      this.recordingStartTime = Date.now()

      this.eventManager.emitEvent('recordingStarted', null)
    } catch (error) {
      console.error('Error starting recording:', error)
      this.eventManager.emitEvent('recordingError', error)
    }
  }

  /**
   * Stops an active capture; no-op otherwise. The duration is computed here,
   * before the recorder's asynchronous 'stop' event fires, so the
   * 'recordingStopped' payload can report it.
   */
  public stopRecording(): void {
    if (!this.isRecording || !this.mediaRecorder) {
      return
    }

    this.recordingDuration = (Date.now() - this.recordingStartTime) / 1000 // In seconds

    this.mediaRecorder.stop()

    if (this.recordingStream) {
      this.recordingStream.getTracks().forEach((track) => track.stop())
    }
  }

  /** True when recorded data chunks are available. */
  public hasRecording(): boolean {
    return this.recordedChunks.length > 0
  }

  /** Length of the last finished recording, in seconds. */
  public getRecordingDuration(): number {
    return this.recordingDuration
  }

  /**
   * Returns an object URL for the recorded WebM video, or null when nothing
   * has been captured. NOTE: the URL is not revoked by this class — callers
   * should URL.revokeObjectURL() it when finished with it.
   */
  public getRecordingData(): string | null {
    if (this.recordedChunks.length !== 0) {
      const blob = new Blob(this.recordedChunks, { type: 'video/webm' })
      return URL.createObjectURL(blob)
    }
    return null
  }

  /**
   * Downloads the recording via a temporary anchor element.
   * Fix: the default filename now uses the .webm extension to match the
   * actual 'video/webm' blob (it previously defaulted to '.mp4', which
   * mislabelled the container). Emits 'recordingError' when there is no
   * recording to export.
   */
  public exportRecording(filename: string = 'scene-recording.webm'): void {
    if (this.recordedChunks.length === 0) {
      this.eventManager.emitEvent(
        'recordingError',
        new Error('No recording available to export')
      )
      return
    }

    this.eventManager.emitEvent('exportingRecording', null)

    try {
      const blob = new Blob(this.recordedChunks, { type: 'video/webm' })
      const url = URL.createObjectURL(blob)

      const a = document.createElement('a')
      document.body.appendChild(a)
      a.style.display = 'none'
      a.href = url
      a.download = filename
      a.click()

      // Release the temporary URL and DOM node once the download has begun.
      window.URL.revokeObjectURL(url)
      document.body.removeChild(a)

      this.eventManager.emitEvent('recordingExported', null)
    } catch (error) {
      console.error('Error exporting recording:', error)
      this.eventManager.emitEvent('recordingError', error)
    }
  }

  /** Discards recorded data, resets the duration and emits 'recordingCleared'. */
  public clearRecording(): void {
    this.recordedChunks = []
    this.recordingDuration = 0
    this.eventManager.emitEvent('recordingCleared', null)
  }

  /** Stops any active capture, clears data and removes the indicator sprite. */
  public dispose(): void {
    this.stopRecording()
    this.clearRecording()

    if (this.recordingIndicator) {
      this.scene.remove(this.recordingIndicator)
      ;(this.recordingIndicator.material as THREE.SpriteMaterial).map?.dispose()
      ;(this.recordingIndicator.material as THREE.SpriteMaterial).dispose()
    }
  }
}

View File

@@ -89,6 +89,16 @@ export class ViewHelperManager implements ViewHelperManagerInterface {
handleResize(): void {}
/** Shows or hides the view-helper gizmo and its DOM container together. */
visibleViewHelper(visible: boolean) {
  this.viewHelper.visible = visible
  this.viewHelperContainer.style.display = visible ? 'block' : 'none'
}
recreateViewHelper(): void {
if (this.viewHelper) {
this.viewHelper.dispose()

View File

@@ -177,3 +177,12 @@ export interface LoaderManagerInterface {
dispose(): void
loadModel(url: string, originalFileName?: string): Promise<void>
}
/**
 * Contract for the canvas-recording manager (see RecordingManager).
 * NOTE(review): RecordingManager also exposes getRecordingData(); consider
 * adding it here if consumers are typed against this interface.
 */
export interface RecordingManagerInterface extends BaseManager {
  // Begin capturing the renderer canvas; resolves once started (or failed).
  startRecording(): Promise<void>
  // Finalize an active capture; no-op otherwise.
  stopRecording(): void
  // True when recorded data is available.
  hasRecording(): boolean
  // Length of the last finished recording, in seconds.
  getRecordingDuration(): number
  // Download the recording; default filename is implementation-defined.
  exportRecording(filename?: string): void
  // Discard recorded data and reset the duration.
  clearRecording(): void
}

View File

@@ -1057,8 +1057,14 @@
"normal": "Normal",
"wireframe": "Wireframe",
"original": "Original",
"depth": "Depth"
}
"depth": "Depth",
"lineart": "Lineart"
},
"startRecording": "Start Recording",
"stopRecording": "Stop Recording",
"exportRecording": "Export Recording",
"clearRecording": "Clear Recording",
"resizeNodeMatchOutput": "Resize Node to match output"
},
"toastMessages": {
"no3dScene": "No 3D scene to apply texture",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "Aplicando textura...",
"backgroundColor": "Color de fondo",
"camera": "Cámara",
"clearRecording": "Borrar grabación",
"edgeThreshold": "Umbral de borde",
"export": "Exportar",
"exportModel": "Exportar modelo",
"exportRecording": "Exportar grabación",
"exportingModel": "Exportando modelo...",
"fov": "FOV",
"light": "Luz",
@@ -478,6 +480,7 @@
"materialMode": "Modo de material",
"materialModes": {
"depth": "Profundidad",
"lineart": "Dibujo lineal",
"normal": "Normal",
"original": "Original",
"wireframe": "Malla"
@@ -485,8 +488,11 @@
"model": "Modelo",
"previewOutput": "Vista previa de salida",
"removeBackgroundImage": "Eliminar imagen de fondo",
"resizeNodeMatchOutput": "Redimensionar nodo para coincidir con la salida",
"scene": "Escena",
"showGrid": "Mostrar cuadrícula",
"startRecording": "Iniciar grabación",
"stopRecording": "Detener grabación",
"switchCamera": "Cambiar cámara",
"switchingMaterialMode": "Cambiando modo de material...",
"upDirection": "Dirección hacia arriba",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "Application de la texture...",
"backgroundColor": "Couleur de fond",
"camera": "Caméra",
"clearRecording": "Effacer l'enregistrement",
"edgeThreshold": "Seuil de Bordure",
"export": "Exportation",
"exportModel": "Exportation du modèle",
"exportRecording": "Exporter l'enregistrement",
"exportingModel": "Exportation du modèle en cours...",
"fov": "FOV",
"light": "Lumière",
@@ -478,6 +480,7 @@
"materialMode": "Mode Matériel",
"materialModes": {
"depth": "Profondeur",
"lineart": "Dessin au trait",
"normal": "Normal",
"original": "Original",
"wireframe": "Fil de fer"
@@ -485,8 +488,11 @@
"model": "Modèle",
"previewOutput": "Aperçu de la sortie",
"removeBackgroundImage": "Supprimer l'image de fond",
"resizeNodeMatchOutput": "Redimensionner le nœud pour correspondre à la sortie",
"scene": "Scène",
"showGrid": "Afficher la grille",
"startRecording": "Démarrer l'enregistrement",
"stopRecording": "Arrêter l'enregistrement",
"switchCamera": "Changer de caméra",
"switchingMaterialMode": "Changement de mode de matériau...",
"upDirection": "Direction Haut",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "テクスチャを適用中...",
"backgroundColor": "背景色",
"camera": "カメラ",
"clearRecording": "録画をクリア",
"edgeThreshold": "エッジ閾値",
"export": "エクスポート",
"exportModel": "モデルをエクスポート",
"exportRecording": "録画をエクスポート",
"exportingModel": "モデルをエクスポート中...",
"fov": "FOV",
"light": "ライト",
@@ -478,6 +480,7 @@
"materialMode": "マテリアルモード",
"materialModes": {
"depth": "深度",
"lineart": "線画",
"normal": "ノーマル",
"original": "オリジナル",
"wireframe": "ワイヤーフレーム"
@@ -485,8 +488,11 @@
"model": "モデル",
"previewOutput": "出力のプレビュー",
"removeBackgroundImage": "背景画像を削除",
"resizeNodeMatchOutput": "ノードを出力に合わせてリサイズ",
"scene": "シーン",
"showGrid": "グリッドを表示",
"startRecording": "録画開始",
"stopRecording": "録画停止",
"switchCamera": "カメラを切り替える",
"switchingMaterialMode": "マテリアルモードの切り替え中...",
"upDirection": "上方向",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "텍스처 적용 중...",
"backgroundColor": "배경색",
"camera": "카메라",
"clearRecording": "녹화 지우기",
"edgeThreshold": "엣지 임계값",
"export": "내보내기",
"exportModel": "모델 내보내기",
"exportRecording": "녹화 내보내기",
"exportingModel": "모델 내보내기 중...",
"fov": "FOV",
"light": "빛",
@@ -478,6 +480,7 @@
"materialMode": "재질 모드",
"materialModes": {
"depth": "깊이",
"lineart": "라인아트",
"normal": "노멀(normal)",
"original": "원본",
"wireframe": "와이어프레임"
@@ -485,8 +488,11 @@
"model": "모델",
"previewOutput": "출력 미리보기",
"removeBackgroundImage": "배경 이미지 제거",
"resizeNodeMatchOutput": "노드 크기를 출력에 맞추기",
"scene": "장면",
"showGrid": "그리드 표시",
"startRecording": "녹화 시작",
"stopRecording": "녹화 중지",
"switchCamera": "카메라 전환",
"switchingMaterialMode": "재질 모드 전환 중...",
"upDirection": "위 방향",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "Применение текстуры...",
"backgroundColor": "Цвет фона",
"camera": "Камера",
"clearRecording": "Очистить запись",
"edgeThreshold": "Пороговое значение края",
"export": "Экспорт",
"exportModel": "Экспорт модели",
"exportRecording": "Экспортировать запись",
"exportingModel": "Экспорт модели...",
"fov": "Угол обзора",
"light": "Свет",
@@ -478,6 +480,7 @@
"materialMode": "Режим Материала",
"materialModes": {
"depth": "Глубина",
"lineart": "Лайнарт",
"normal": "Нормальный",
"original": "Оригинал",
"wireframe": "Каркас"
@@ -485,8 +488,11 @@
"model": "Модель",
"previewOutput": "Предварительный просмотр",
"removeBackgroundImage": "Удалить фоновое изображение",
"resizeNodeMatchOutput": "Изменить размер узла под вывод",
"scene": "Сцена",
"showGrid": "Показать сетку",
"startRecording": "Начать запись",
"stopRecording": "Остановить запись",
"switchCamera": "Переключить камеру",
"switchingMaterialMode": "Переключение режима материала...",
"upDirection": "Направление Вверх",

View File

@@ -467,9 +467,11 @@
"applyingTexture": "应用纹理中...",
"backgroundColor": "背景颜色",
"camera": "摄影机",
"clearRecording": "清除录制",
"edgeThreshold": "边缘阈值",
"export": "导出",
"exportModel": "导出模型",
"exportRecording": "导出录制",
"exportingModel": "正在导出模型...",
"fov": "视场",
"light": "灯光",
@@ -478,6 +480,7 @@
"materialMode": "材质模式",
"materialModes": {
"depth": "深度",
"lineart": "线稿",
"normal": "法线",
"original": "原始",
"wireframe": "线框"
@@ -485,8 +488,11 @@
"model": "模型",
"previewOutput": "预览输出",
"removeBackgroundImage": "移除背景图片",
"resizeNodeMatchOutput": "调整节点以匹配输出",
"scene": "场景",
"showGrid": "显示网格",
"startRecording": "开始录制",
"stopRecording": "停止录制",
"switchCamera": "切换摄影机类型",
"switchingMaterialMode": "切换材质模式中...",
"upDirection": "上方向",