// forked from vgaNAR6ta/drags-and-nerds
import resizeToFit from 'intrinsic-scale'
|
|
|
|
import noiseUrl from "../noise-power.png"
|
|
|
|
import baseVertSource from "./base.vert?raw"
|
|
import baseFragSource from "./base.frag?raw"
|
|
|
|
import glitchVertSource from "./glitch.vert?raw"
|
|
import glitchFragSource from "./glitch.frag?raw"
|
|
|
|
export class Glitcher {
	/** @type {HTMLCanvasElement} Destination canvas for the WebGL output. */
	canvas

	/**
	 * Active glitch bands, in clip-space coordinates (x, y in [-1, 1]).
	 * @type {{x: number, y: number, width: number}[]}
	 */
	glitches = []

	/**
	 * @param {HTMLCanvasElement} canvas - Canvas that receives the rendered effect.
	 */
	constructor(canvas) {
		this.canvas = canvas
		// Staging canvas: video frames are scaled here before texture upload.
		// Its real dimensions are assigned on the first resize().
		this.videoCanvas = new OffscreenCanvas(10, 10);
		this.video = document.createElement("video")
		this.init()
	}

	/**
	 * Creates the WebGL context, both shader programs (base image pass and
	 * glitch pass) with every buffer/texture they use, and starts the looping
	 * background video that fills the glitch bands.
	 * @throws {Error} when WebGL is not supported.
	 */
	init(){
		// Muted + autoplay so browsers allow playback without a user gesture.
		this.video.muted = true
		this.video.loop = true
		this.video.autoplay = true
		this.video.src = "/background.webm"
		this.video.addEventListener("canplay", e => this.video.play())

		this.ctx = this.canvas.getContext("webgl")
		if(!this.ctx){
			throw new Error("WebGL isn't supported")
		}

		let baseProgram = createProgram(this.ctx, baseVertSource, baseFragSource);

		// Full-screen quad, drawn as a 4-vertex triangle strip.
		let panelPositionBuffer = this.ctx.createBuffer()
		this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, panelPositionBuffer)
		const positions = [
			-1.0, 1.0,
			1.0, 1.0,
			-1.0, -1.0,
			1.0, -1.0,
		]
		this.ctx.bufferData(this.ctx.ARRAY_BUFFER, new Float32Array(positions), this.ctx.STATIC_DRAW)

		// Matching UVs; v grows downward so the uploaded image appears upright.
		let panelUVBuffer = this.ctx.createBuffer()
		this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, panelUVBuffer)
		const uv = [
			0.0, 0.0,
			1.0, 0.0,
			0.0, 1.0,
			1.0, 1.0,
		]
		this.ctx.bufferData(this.ctx.ARRAY_BUFFER, new Float32Array(uv), this.ctx.STATIC_DRAW)

		let imageTexture = this.ctx.createTexture()

		// Everything the base (image) pass needs, bundled together.
		this.base = {
			program: baseProgram,
			aVertexPosition: this.ctx.getAttribLocation(baseProgram, "aVertexPosition"),
			aTextureCoord: this.ctx.getAttribLocation(baseProgram, "aTextureCoord"),
			uImageSampler: this.ctx.getUniformLocation(baseProgram, "uImageSampler"),
			uWindowSize: this.ctx.getUniformLocation(baseProgram, "uWindowSize"),
			imageTexture: imageTexture,
			panelUVBuffer: panelUVBuffer,
			panelPositionsBuffer: panelPositionBuffer,
		}
		this.clearImage();

		let glitchPositionBuffer = this.ctx.createBuffer()
		this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, glitchPositionBuffer)
		const glitchPosition = [
			-1.0, 1.0,
			1.0, 1.0,
			-1.0, -1.0,
			1.0, -1.0,
		]
		this.ctx.bufferData(this.ctx.ARRAY_BUFFER, new Float32Array(glitchPosition), this.ctx.STATIC_DRAW)

		let glitchProgram = createProgram(this.ctx, glitchVertSource, glitchFragSource)
		let videoFrameTexture = this.ctx.createTexture()

		// 1x1 transparent placeholder so the noise sampler is valid before the
		// noise image finishes loading asynchronously.
		let noiseTexture = this.ctx.createTexture()
		this.ctx.bindTexture(this.ctx.TEXTURE_2D, noiseTexture)
		this.ctx.texImage2D(
			this.ctx.TEXTURE_2D,
			0,
			this.ctx.RGBA,
			1, 1,
			0,
			this.ctx.RGBA,
			this.ctx.UNSIGNED_BYTE,
			new Uint8Array([0, 0, 0, 0])
		)

		let noiseImage = new Image()
		noiseImage.addEventListener("load", () => {
			this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.glitch.noiseTexture)
			this.ctx.texImage2D(
				this.ctx.TEXTURE_2D,
				0,
				this.ctx.RGBA,
				this.ctx.RGBA,
				this.ctx.UNSIGNED_BYTE,
				noiseImage
			)

			// REPEAT lets the shader tile the noise; NEAREST keeps it crisp.
			this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_S, this.ctx.REPEAT);
			this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_T, this.ctx.REPEAT);
			this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_MIN_FILTER, this.ctx.NEAREST);
			this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_MAG_FILTER, this.ctx.NEAREST);
		})
		noiseImage.src = noiseUrl

		// Everything the glitch (video band) pass needs, bundled together.
		this.glitch = {
			program: glitchProgram,
			aVertexPosition: this.ctx.getAttribLocation(glitchProgram, "aVertexPosition"),
			uGlitchPosition: this.ctx.getUniformLocation(glitchProgram, "uGlitchPosition"),
			uGlitchWidth: this.ctx.getUniformLocation(glitchProgram, "uGlitchWidth"),
			uImageRatio: this.ctx.getUniformLocation(glitchProgram, "uImageRatio"),
			uWindowSize: this.ctx.getUniformLocation(glitchProgram, "uWindowSize"),
			uVideoSampler: this.ctx.getUniformLocation(glitchProgram, "uVideoSampler"),
			uNoiseSampler: this.ctx.getUniformLocation(glitchProgram, "uNoiseSampler"),
			uRandom: this.ctx.getUniformLocation(glitchProgram, "uRandom"),
			glitchPositionBuffer: glitchPositionBuffer,
			videoFrameTexture: videoFrameTexture,
			noiseTexture: noiseTexture
		}
		// FIX: removed leftover debug console.log(this.glitch)
	}

	/** Replaces the current image texture with a 1x1 transparent pixel. */
	clearImage(){
		this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.base.imageTexture)
		this.ctx.texImage2D(
			this.ctx.TEXTURE_2D,
			0,
			this.ctx.RGBA,
			1, 1,
			0,
			this.ctx.RGBA,
			this.ctx.UNSIGNED_BYTE,
			new Uint8Array([0, 0, 0, 0])
		)
	}

	/** Removes every registered glitch band. */
	clearGlitch(){
		this.glitches = []
	}

	/**
	 * Registers a glitch band at a point given in CSS pixels relative to the
	 * canvas; the point is converted to clip space (y axis flipped).
	 * @param {number} x - Horizontal position in CSS pixels.
	 * @param {number} y - Vertical position in CSS pixels.
	 * @param {number} [width=1] - Band width, passed straight to the shader.
	 */
	addGlitch(x, y, width=1){
		let computedStyle = window.getComputedStyle(this.canvas)
		let clientWidth = parseFloat(computedStyle.width)
		let clientHeight = parseFloat(computedStyle.height)

		this.glitches.push({
			x: ((x*2)/clientWidth)-1,
			y: (((y*2)/clientHeight)-1)*-1,
			width
		})
	}

	/**
	 * Loads an image file into the base texture, resizing the canvas to fit.
	 * No-op when the same File object is set twice in a row.
	 * @param {File} imageFile
	 * @throws {Error} when the image cannot be decoded.
	 */
	async setImage(imageFile){
		if(imageFile === this.currentImageFile){
			return
		}

		if(this.currentImage){
			// Release the previous image's object URL before replacing it.
			URL.revokeObjectURL(this.currentImage.src)
		}

		let image = new Image()
		const loadProm = new Promise((res, rej) => {
			function ok(){
				image.removeEventListener("error", nok)
				res()
			}

			function nok(){
				image.removeEventListener("load", ok)
				// FIX: reject with an Error instead of undefined.
				rej(new Error("failed to load image"))
			}

			image.addEventListener("load", ok)
			image.addEventListener("error", nok)
		})
		image.src = URL.createObjectURL(imageFile)

		try {
			await loadProm
		} catch(e) {
			// The decode failed: don't leak the object URL.
			URL.revokeObjectURL(image.src)
			throw e
		}
		this.resize(image.width, image.height)

		this.currentImageFile = imageFile
		this.currentImage = image

		this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.base.imageTexture)
		this.ctx.texImage2D(
			this.ctx.TEXTURE_2D,
			0,
			this.ctx.RGBA,
			this.ctx.RGBA,
			this.ctx.UNSIGNED_BYTE,
			this.currentImage
		)

		// CLAMP_TO_EDGE + non-mipmap filtering are required for
		// non-power-of-two textures in WebGL 1.
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_S, this.ctx.CLAMP_TO_EDGE);
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_T, this.ctx.CLAMP_TO_EDGE);
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_MIN_FILTER, this.ctx.LINEAR);
	}

	/**
	 * Renders one frame at the image's native resolution and exports it.
	 * The canvas is restored to its constrained display size afterwards.
	 * @param {string} [type] - MIME type forwarded to canvas.toBlob.
	 * @param {number} [quality] - Encoder quality forwarded to canvas.toBlob.
	 * @returns {Promise<Blob>}
	 * @throws {Error} when no image is loaded or encoding fails.
	 */
	async toBlob(type, quality) {
		// FIX: fail with a clear message instead of a TypeError on undefined.
		if(!this.currentImage){
			throw new Error("No image loaded")
		}
		this.resize(this.currentImage.width, this.currentImage.height, true)
		this.render()
		let blob = await new Promise((res, rej) => this.canvas.toBlob((imageBlob) => {
			if(imageBlob){
				res(imageBlob)
			} else {
				// FIX: reject with an Error instead of undefined.
				rej(new Error("canvas.toBlob returned no data"))
			}
		}, type, quality))
		this.resize(this.currentImage.width, this.currentImage.height, false)
		return blob
	}

	/**
	 * Resizes the canvas (and the video staging canvas) to the given pixel
	 * size. Unless forced, the size is constrained to the canvas' CSS
	 * max-width/max-height (scaled by devicePixelRatio) with "contain" fitting.
	 * @param {number} width
	 * @param {number} height
	 * @param {boolean} [force=false] - Skip the max-size constraint (used for export).
	 */
	resize(width, height, force=false){
		if(!force){

			let computedStyle = window.getComputedStyle(this.canvas)
			let maxWidth = parseFloat(computedStyle["max-width"])
			let maxHeight = parseFloat(computedStyle["max-height"])

			if(window.devicePixelRatio){
				maxWidth *= window.devicePixelRatio
				maxHeight *= window.devicePixelRatio
			}

			// max-width/max-height may be "none" → NaN; fall back to the
			// requested size (i.e. unconstrained on that axis).
			if(Number.isNaN(maxWidth)){
				maxWidth = width
			}

			if(Number.isNaN(maxHeight)){
				maxHeight = height
			}

			let resize = resizeToFit("contain", {width, height}, {width: maxWidth, height: maxHeight})

			this.canvas.width = resize.width
			this.canvas.height = resize.height

		} else {
			this.canvas.width = width
			this.canvas.height = height
		}

		this.videoCanvas.width = this.canvas.width
		this.videoCanvas.height = this.canvas.height
		this.ctx.viewport(0, 0, this.canvas.width, this.canvas.height);
	}

	/**
	 * Copies the current video frame into the glitch pass' texture, scaled
	 * through the staging canvas to match the output size.
	 */
	updateVideoTexture(){
		let vctx = this.videoCanvas.getContext("2d")
		vctx.clearRect(0, 0, this.videoCanvas.width, this.videoCanvas.height)
		vctx.drawImage(this.video, 0, 0, this.videoCanvas.width, this.videoCanvas.height);

		this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.glitch.videoFrameTexture)
		this.ctx.texImage2D(
			this.ctx.TEXTURE_2D,
			0,
			this.ctx.RGBA,
			this.ctx.RGBA,
			this.ctx.UNSIGNED_BYTE,
			this.videoCanvas
		)

		// Non-power-of-two frames need CLAMP_TO_EDGE and no mipmaps (WebGL 1).
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_S, this.ctx.CLAMP_TO_EDGE);
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_WRAP_T, this.ctx.CLAMP_TO_EDGE);
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_MIN_FILTER, this.ctx.LINEAR);
		this.ctx.texParameteri(this.ctx.TEXTURE_2D, this.ctx.TEXTURE_MAG_FILTER, this.ctx.LINEAR);
	}

	/**
	 * Draws one frame: the base image quad, then one video-filled quad per
	 * registered glitch band.
	 * @throws {Error} when init() has not created a WebGL context.
	 */
	render(){
		if(!this.ctx){
			throw new Error("Glitcher not initialized, please run init()")
		}

		this.updateVideoTexture()

		this.ctx.clearColor(0.0, 0.0, 0.0, 0.0)
		this.ctx.clearDepth(1.0)
		this.ctx.enable(this.ctx.DEPTH_TEST)
		// FIX: was enable(LEQUAL). LEQUAL is a depth comparison function, not a
		// capability — enable(LEQUAL) raises INVALID_ENUM; depthFunc is the
		// intended call.
		this.ctx.depthFunc(this.ctx.LEQUAL)

		this.ctx.clear(this.ctx.COLOR_BUFFER_BIT | this.ctx.DEPTH_BUFFER_BIT)

		{ // Base rendering
			this.ctx.useProgram(this.base.program)
			this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, this.base.panelPositionsBuffer)
			this.ctx.vertexAttribPointer(
				this.base.aVertexPosition,
				2, // N components per iteration
				this.ctx.FLOAT,
				false, //Normalize,
				0, // (stride) how many bytes to get from one set of values to the next
				0, // Start offset
			)
			this.ctx.enableVertexAttribArray(this.base.aVertexPosition)

			this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, this.base.panelUVBuffer)
			this.ctx.vertexAttribPointer(
				this.base.aTextureCoord,
				2,
				this.ctx.FLOAT,
				false,
				0,
				0,
			)
			this.ctx.enableVertexAttribArray(this.base.aTextureCoord)

			this.ctx.activeTexture(this.ctx.TEXTURE0)
			this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.base.imageTexture)

			this.ctx.uniform2f(this.base.uWindowSize, this.canvas.width, this.canvas.height)
			this.ctx.uniform1i(this.base.uImageSampler, 0)

			this.ctx.drawArrays(
				this.ctx.TRIANGLE_STRIP,
				0, // Vertex offset
				4, // Total vertex
			);
		}

		{ // Glitch rendering
			this.ctx.useProgram(this.glitch.program)
			this.ctx.bindBuffer(this.ctx.ARRAY_BUFFER, this.glitch.glitchPositionBuffer)
			this.ctx.vertexAttribPointer(
				// FIX: was this.glitch.glitchPositionBuffer (a WebGLBuffer);
				// the first argument must be the attribute location.
				this.glitch.aVertexPosition,
				2, // N components per iteration
				this.ctx.FLOAT,
				false, //Normalize,
				0, // (stride) how many bytes to get from one set of values to the next
				0, // Start offset
			)
			this.ctx.enableVertexAttribArray(this.glitch.aVertexPosition)

			this.ctx.activeTexture(this.ctx.TEXTURE0)
			this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.glitch.videoFrameTexture)
			this.ctx.uniform1i(this.glitch.uVideoSampler, 0)

			this.ctx.activeTexture(this.ctx.TEXTURE1)
			this.ctx.bindTexture(this.ctx.TEXTURE_2D, this.glitch.noiseTexture)
			this.ctx.uniform1i(this.glitch.uNoiseSampler, 1)

			this.ctx.uniform2f(this.glitch.uWindowSize, this.canvas.width, this.canvas.height)
			// Keep the band shape independent of the canvas aspect ratio.
			if(this.canvas.width > this.canvas.height){
				this.ctx.uniform2f(this.glitch.uImageRatio, 1.0, this.canvas.width/this.canvas.height)
			} else {
				this.ctx.uniform2f(this.glitch.uImageRatio, this.canvas.height/this.canvas.width, 1.0)
			}

			for(let glitch of this.glitches) {
				// Fresh randomness per band so each flickers independently.
				this.ctx.uniform4f(this.glitch.uRandom,
					Math.random(), Math.random(), Math.random(), Math.random()
				)
				this.ctx.uniform2f(this.glitch.uGlitchPosition, glitch.x, glitch.y)
				this.ctx.uniform1f(this.glitch.uGlitchWidth, glitch.width)
				this.ctx.drawArrays(
					this.ctx.TRIANGLE_STRIP,
					0, // Vertex offset
					4, // Total vertex
				);
			}
		}
	}
}
/**
 * Compiles a vertex/fragment shader pair and links them into a program.
 * @param {WebGLRenderingContext} gl
 * @param {string} vertSource - GLSL source for the vertex shader.
 * @param {string} fragSource - GLSL source for the fragment shader.
 * @returns {WebGLProgram} the linked program.
 * @throws {Error} when compilation or linking fails, including the driver's info log.
 */
function createProgram(gl, vertSource, fragSource){
	let vertShader = gl.createShader(gl.VERTEX_SHADER)
	gl.shaderSource(vertShader, vertSource)
	gl.compileShader(vertShader)

	if (!gl.getShaderParameter(vertShader, gl.COMPILE_STATUS)) {
		const log = gl.getShaderInfoLog(vertShader)
		gl.deleteShader(vertShader) // FIX: don't leak the failed shader object
		throw new Error(`failed to compile vertex: ${log}`)
	}

	let fragShader = gl.createShader(gl.FRAGMENT_SHADER)
	gl.shaderSource(fragShader, fragSource)
	gl.compileShader(fragShader)

	if (!gl.getShaderParameter(fragShader, gl.COMPILE_STATUS)) {
		const log = gl.getShaderInfoLog(fragShader)
		gl.deleteShader(vertShader) // FIX: release both shaders on failure
		gl.deleteShader(fragShader)
		throw new Error(`failed to compile fragment: ${log}`)
	}

	let program = gl.createProgram()
	gl.attachShader(program, vertShader)
	gl.attachShader(program, fragShader)
	gl.linkProgram(program)

	// Shaders may be flagged for deletion once attached: the program keeps
	// them alive and they are freed together with it.
	gl.deleteShader(vertShader)
	gl.deleteShader(fragShader)

	if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
		const log = gl.getProgramInfoLog(program)
		gl.deleteProgram(program) // FIX: don't leak the failed program object
		throw new Error(`failed to link shader: ${log}`)
	}

	return program
}