Mirror of https://github.com/Comfy-Org/ComfyUI_frontend.git, synced 2026-02-27 18:24:11 +00:00
fix
This commit is contained in:
@@ -3409,262 +3409,6 @@ LGraphTextureKuwaharaFilter.pixel_shader = "\n\
|
||||
}
|
||||
|
||||
LGraphLensFX.pixel_shader = "precision highp float;\n\
|
||||
=======
|
||||
//Register the Kuwahara filter node under the texture category.
LiteGraph.registerNodeType("texture/kuwahara", LGraphTextureKuwaharaFilter);
|
||||
|
||||
// Texture Webcam *****************************************
|
||||
// Texture Webcam *****************************************
//Webcam node: exposes the device camera as a texture output.
function LGraphTextureWebcam() {
    this.addOutput("Webcam", "Texture");
    //texture_name: optional global texture slot to publish into;
    //facingMode: which camera to request ("user" = front camera)
    this.properties = { texture_name: "", facingMode: "user" };
    //box color doubles as a status light: black = idle, green = streaming, red = error
    this.boxcolor = "black";
    //frame counter, bumped on every uploaded frame
    this.version = 0;
}
|
||||
|
||||
//Metadata shown in the node palette and title bar.
LGraphTextureWebcam.title = "Webcam";
LGraphTextureWebcam.desc = "Webcam texture";

//class-level flag: true while a webcam node holds an open stream
LGraphTextureWebcam.is_webcam_open = false;
|
||||
|
||||
//Request camera access and start the capture; streamReady is invoked on
//success, the "stream_error" event is triggered on rejection.
LGraphTextureWebcam.prototype.openStream = function() {
    //NOTE(review): this guards on the legacy navigator.getUserMedia but then
    //calls the modern navigator.mediaDevices.getUserMedia below — browsers
    //that only expose the modern API bail out here. Worth confirming/fixing.
    if (!navigator.getUserMedia) {
        //console.log('getUserMedia() is not supported in your browser, use chrome and enable WebRTC from about://flags');
        return;
    }

    //prevents onExecute from re-requesting while the permission prompt is up
    this._waiting_confirmation = true;

    // Not showing vendor prefixes.
    var constraints = {
        audio: false,
        video: { facingMode: this.properties.facingMode }
    };
    navigator.mediaDevices
        .getUserMedia(constraints)
        .then(this.streamReady.bind(this))
        .catch(onFailSoHard);

    var that = this;
    //called when permission is denied or no camera is available
    //(hoisted declaration, so the .catch above can reference it)
    function onFailSoHard(e) {
        LGraphTextureWebcam.is_webcam_open = false;
        console.log("Webcam rejected", e);
        that._webcam_stream = false;
        that.boxcolor = "red";
        that.trigger("stream_error");
    }
};
|
||||
|
||||
//Stop the active capture (if any), reset node state and notify listeners
//through the "stream_closed" event.
LGraphTextureWebcam.prototype.closeStream = function() {
    var stream = this._webcam_stream;
    if (!stream) {
        return;
    }
    //stop every track so the browser releases the camera
    var tracks = stream.getTracks();
    for (var i = 0; i < tracks.length; ++i) {
        tracks[i].stop();
    }
    LGraphTextureWebcam.is_webcam_open = false;
    this._webcam_stream = null;
    this._video = null;
    this.boxcolor = "black"; //back to idle status color
    this.trigger("stream_closed");
};
|
||||
|
||||
//getUserMedia success callback: stores the stream and lazily creates the
//hidden <video> element used as the per-frame upload source.
LGraphTextureWebcam.prototype.streamReady = function(localMediaStream) {
    this._webcam_stream = localMediaStream;
    //this._waiting_confirmation = false;
    this.boxcolor = "green"; //status light: streaming

    if (!this._video) {
        var video = document.createElement("video");
        video.autoplay = true;
        video.srcObject = localMediaStream;
        //document.body.appendChild( video ); //debug
        //when video info is loaded (size and so)
        video.onloadedmetadata = function(e) {
            // Ready to go. Do some stuff.
            LGraphTextureWebcam.is_webcam_open = true;
            console.log(e);
        };
        this._video = video;
    }
    this.trigger("stream_ready", this._video);
};
|
||||
|
||||
//React to property edits from the UI: switching the camera facing mode
//requires restarting the capture with the new constraints.
LGraphTextureWebcam.prototype.onPropertyChanged = function(name, value) {
    if (name !== "facingMode") {
        return;
    }
    this.properties.facingMode = value;
    this.closeStream();
    this.openStream();
};
|
||||
|
||||
//Node removed from the graph: stop all camera tracks and drop references
//so the browser releases the device.
LGraphTextureWebcam.prototype.onRemoved = function() {
    if (!this._webcam_stream) {
        return;
    }

    var tracks = this._webcam_stream.getTracks();
    for (var i = 0; i < tracks.length; ++i) {
        tracks[i].stop();
    }

    //Fix: closeStream() and the getUserMedia error path both reset this
    //class-level flag, but onRemoved did not, leaving it stale (true)
    //after the node was deleted while streaming.
    LGraphTextureWebcam.is_webcam_open = false;

    this._webcam_stream = null;
    this._video = null;
};
|
||||
|
||||
//Paint the live video (or its GPU texture) inside the node body.
LGraphTextureWebcam.prototype.onDrawBackground = function(ctx) {
    //nothing to draw while collapsed or shrunk to the title bar
    if (this.flags.collapsed || this.size[1] <= 20) {
        return;
    }
    if (!this._video) {
        return;
    }

    //render to graph canvas
    ctx.save();
    if (ctx.webgl) {
        //webgl canvas: draw the uploaded texture, if a frame exists yet
        if (this._video_texture) {
            ctx.drawImage(
                this._video_texture,
                0,
                0,
                this.size[0],
                this.size[1]
            );
        }
    } else {
        //reverse image
        ctx.drawImage(this._video, 0, 0, this.size[0], this.size[1]);
    }
    ctx.restore();
};
|
||||
|
||||
//Per-frame update: lazily opens the stream, uploads the current video
//frame into a GPU texture and publishes it on the outputs.
LGraphTextureWebcam.prototype.onExecute = function() {
    if (this._webcam_stream == null && !this._waiting_confirmation) {
        this.openStream();
    }

    var video = this._video;
    if (!video || !video.videoWidth) {
        return; //no decoded frame available yet
    }

    var width = video.videoWidth;
    var height = video.videoHeight;

    //(re)allocate the texture whenever the video resolution changes
    var tex = this._video_texture;
    if (!tex || tex.width != width || tex.height != height) {
        tex = this._video_texture = new GL.Texture(width, height, {
            format: gl.RGB,
            filter: gl.LINEAR
        });
    }

    tex.uploadImage(video);
    tex.version = ++this.version;

    //optionally publish the texture under a global name
    if (this.properties.texture_name) {
        var container = LGraphTexture.getTexturesContainer();
        container[this.properties.texture_name] = tex;
    }

    this.setOutputData(0, tex);
    //fill the optional width/height outputs, if the user added them
    for (var i = 1; i < this.outputs.length; ++i) {
        var output = this.outputs[i];
        if (!output) {
            continue;
        }
        if (output.name == "width") {
            this.setOutputData(i, video.videoWidth);
        } else if (output.name == "height") {
            this.setOutputData(i, video.videoHeight);
        }
    }
};
|
||||
|
||||
//Optional extra outputs offered in the node context menu.
LGraphTextureWebcam.prototype.onGetOutputs = function() {
    var outputs = [];
    outputs.push(["width", "number"]);
    outputs.push(["height", "number"]);
    outputs.push(["stream_ready", LiteGraph.EVENT]);
    outputs.push(["stream_closed", LiteGraph.EVENT]);
    outputs.push(["stream_error", LiteGraph.EVENT]);
    return outputs;
};
|
||||
|
||||
//Register the webcam node under the texture category.
LiteGraph.registerNodeType(
    "texture/webcam",
    LGraphTextureWebcam
);
|
||||
|
||||
//from https://github.com/spite/Wagner
//Lens FX node: lens distortion + chromatic aberration post effect.
function LGraphLensFX() {
    this.addInput("in", "Texture");
    this.addInput("f", "number"); //optional per-frame factor override
    this.addOutput("out", "Texture");
    this.properties = {
        enabled: true,
        factor: 1,
        precision: LGraphTexture.LOW
    };
    //reused every frame to avoid per-call allocations
    this._uniforms = { u_texture: 0, u_factor: 1 };
}
|
||||
|
||||
//Metadata shown in the node palette and title bar.
LGraphLensFX.title = "Lens FX";
LGraphLensFX.desc = "distortion and chromatic aberration";

//editor hint: render the precision property as a combo box
LGraphLensFX.widgets_info = {
    precision: { widget: "combo", values: LGraphTexture.MODE_VALUES }
};
|
||||
|
||||
//Optional extra inputs offered in the node context menu.
LGraphLensFX.prototype.onGetInputs = function() {
    var inputs = [["enabled", "boolean"]];
    return inputs;
};
|
||||
|
||||
//Per-frame update: runs the lens shader over the input texture and
//publishes the result; passes the input through when disabled.
LGraphLensFX.prototype.onExecute = function() {
    var tex = this.getInputData(0);
    if (!tex) {
        return;
    }
    if (!this.isOutputConnected(0)) {
        return; //saves work
    }

    //pass-through mode: forward the input untouched
    if (
        this.properties.precision === LGraphTexture.PASS_THROUGH ||
        this.getInputOrProperty("enabled") === false
    ) {
        this.setOutputData(0, tex);
        return;
    }

    //reallocate the render target whenever the input size or type changes
    var target = this._temp_texture;
    var needs_realloc =
        !target ||
        target.width != tex.width ||
        target.height != tex.height ||
        target.type != tex.type;
    if (needs_realloc) {
        target = this._temp_texture = new GL.Texture(tex.width, tex.height, {
            type: tex.type,
            format: gl.RGBA,
            filter: gl.LINEAR
        });
    }

    //compile the shader once and cache it on the class
    var shader = LGraphLensFX._shader;
    if (!shader) {
        shader = LGraphLensFX._shader = new GL.Shader(
            GL.Shader.SCREEN_VERTEX_SHADER,
            LGraphLensFX.pixel_shader
        );
    }

    //the "f" input overrides the factor property when connected
    var factor = this.getInputData(1);
    if (factor == null) {
        factor = this.properties.factor;
    }

    var uniforms = this._uniforms;
    uniforms.u_factor = factor;

    //apply shader
    gl.disable(gl.DEPTH_TEST);
    target.drawTo(function() {
        tex.bind(0);
        shader.uniforms(uniforms).draw(GL.Mesh.getScreenQuad());
    });

    this.setOutputData(0, target);
};
|
||||
|
||||
LGraphLensFX.pixel_shader =
|
||||
"precision highp float;\n\
|
||||
>>>>>>> c6b16f04e0c2859d0b9dfe07104a594713305912
|
||||
varying vec2 v_coord;\n\
|
||||
uniform sampler2D u_texture;\n\
|
||||
uniform float u_factor;\n\
|
||||
|
||||
Reference in New Issue
Block a user