Render video on top of the slide, if there is any declared

Adds video playback as a texture in WebGL. If there is a video
declared in the presentationInfo JSON from core and it is accessible
through the public URL, we create a video element for it, which can
then be added as a texture to the WebGL context and rendered on
top of the slide.

Signed-off-by: Tomaž Vajngerl <tomaz.vajngerl@collabora.co.uk>
Change-Id: Ie0b003e98c566c560280e8bf2dfbcc408d7d93f7
This commit is contained in:
Tomaž Vajngerl 2024-07-12 23:42:23 +09:00 committed by Szymon Kłos
parent 90e9ec22ae
commit a1de2aa11c
2 changed files with 120 additions and 5 deletions

View file

@ -10,11 +10,18 @@
declare var SlideShow: any;
// Per-video render state: the GL texture that video frames are
// uploaded into, the backing <video> element the frames come from,
// and the VAO holding the quad geometry positioning it on the slide.
class VideoRenderInfo {
// Texture updated each frame from videoElement (see updateTexture).
public texture: WebGLTexture;
// Hidden, muted, looping element created by setupVideo().
public videoElement: HTMLVideoElement;
// Quad in clip space built by setupVideoPosition().
public vao: WebGLVertexArrayObject;
}
class SlideRenderer {
public _context: RenderContext = null;
private _program: WebGLProgram = null;
private _vao: WebGLVertexArrayObject = null;
public _slideTexture: WebGLTexture;
private _videos: VideoRenderInfo[];
private _canvas: HTMLCanvasElement;
public getVertexShader(): string {
@ -49,8 +56,10 @@ class SlideRenderer {
const gl = this._context.gl;
const positions = new Float32Array([
-1.0, -1.0, 0, 0, 1, 1.0, -1.0, 0, 1, 1, -1.0, 1.0, 0, 0, 0, 1.0, 1.0, 0,
1, 0,
-1.0, -1.0, 0.0, 0.0, 1.0,
1.0, -1.0, 0.0, 1.0, 1.0,
-1.0, 1.0, 0.0, 0.0, 0.0,
1.0, 1.0, 0.0, 1.0, 0.0,
]);
const buffer = gl.createBuffer();
@ -92,11 +101,97 @@ class SlideRenderer {
return this._context.loadTexture(<any>image);
}
public renderFrame(currentSlideTexture: WebGLTexture) {
/**
 * Creates a muted, looping, inline <video> element for the given URL
 * and starts playback so frames become available for texture upload.
 *
 * @param url public URL of the video resource
 * @returns the (asynchronously) playing video element
 */
private setupVideo(url: string): HTMLVideoElement {
	const video = document.createElement("video");
	// Inline + muted playback is what browsers require to allow
	// play() without a user gesture (autoplay policies).
	video.playsInline = true;
	video.muted = true;
	video.loop = true;
	video.addEventListener("playing", () => {
		// todo
	}, true);
	video.addEventListener("timeupdate", () => {
		// todo
	}, true);
	video.src = url;
	// play() returns a promise that rejects when autoplay is blocked
	// or the source cannot be loaded; handle it explicitly instead of
	// leaving an unhandled promise rejection.
	const playPromise = video.play();
	if (playPromise !== undefined) {
		playPromise.catch((error: unknown) => {
			console.debug("SlideRenderer.setupVideo: play() failed: " + String(error));
		});
	}
	return video;
}
/**
 * Allocates a 2D texture pre-filled with a single opaque blue pixel,
 * serving as a placeholder until real video frames are uploaded.
 */
private initTexture() {
	const gl = this._context.gl;
	// 1x1 RGBA placeholder (opaque blue) so sampling the texture is
	// valid before the first video frame arrives.
	const placeholder = new Uint8Array([0, 0, 255, 255]);
	const texture = gl.createTexture();
	gl.bindTexture(gl.TEXTURE_2D, texture);
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, placeholder);
	// Clamp + linear min filter: video dimensions are not guaranteed
	// to be powers of two, so no wrapping and no mipmaps.
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
	gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
	return texture;
}
/**
 * Uploads the current frame of the video element into the given
 * texture (full re-specification on every call).
 */
private updateTexture(target: WebGLTexture, source: HTMLVideoElement) {
	const gl = this._context.gl;
	gl.bindTexture(gl.TEXTURE_2D, target);
	// TexImageSource overload: width/height are taken from the video.
	gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, source);
}
/**
 * Stores the slide texture, creates playback state for every video
 * declared on the slide, and kicks off the per-frame render loop.
 *
 * @param currentSlideTexture texture holding the rendered slide
 * @param slideInfo slide metadata from the presentationInfo JSON
 * @param docWidth document width, used to map video bounds to clip space
 * @param docHeight document height, used to map video bounds to clip space
 */
public renderSlide(currentSlideTexture: WebGLTexture, slideInfo: SlideInfo, docWidth: number, docHeight: number) {
	this._slideTexture = currentSlideTexture;
	this._videos = [];
	// Not every slide declares videos — guard so slides without any
	// (or with missing metadata) still render instead of throwing.
	const videoInfos = slideInfo && slideInfo.videos ? slideInfo.videos : [];
	for (const videoInfo of videoInfos) {
		const video = new VideoRenderInfo();
		video.videoElement = this.setupVideo(videoInfo.url);
		video.texture = this.initTexture();
		video.vao = this.setupVideoPosition(videoInfo.x, videoInfo.y, videoInfo.width, videoInfo.height, docWidth, docHeight);
		this._videos.push(video);
	}
	requestAnimationFrame(this.render.bind(this));
}
/**
 * Builds a VAO with a quad covering the video's bounds, mapping the
 * document-space rectangle (x, y, width, height) into WebGL clip
 * space ([-1, 1] on both axes). The y coordinates are negated because
 * document y grows downwards while clip-space y grows upwards.
 *
 * @returns the VAO to bind when drawing this video's quad
 */
setupVideoPosition(x: number, y: number, width: number, height: number, docWidth: number, docHeight: number): WebGLVertexArrayObject {
	const gl = this._context.gl;
	// const, not var: none of these are ever reassigned.
	const xMin = (x / docWidth) * 2.0 - 1.0;
	const xMax = ((x + width) / docWidth) * 2.0 - 1.0;
	const yMin = (y / docHeight) * 2.0 - 1.0;
	const yMax = ((y + height) / docHeight) * 2.0 - 1.0;
	// Interleaved layout: x, y, z, u, v — same layout as the slide
	// quad, drawn as a 4-vertex TRIANGLE_STRIP.
	const positions = new Float32Array([
		xMin, -yMin, 0.0, 0.0, 1.0,
		xMax, -yMin, 0.0, 1.0, 1.0,
		xMin, -yMax, 0.0, 0.0, 0.0,
		xMax, -yMax, 0.0, 1.0, 0.0,
	]);
	const buffer = gl.createBuffer();
	gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
	gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
	const vao = gl.createVertexArray();
	gl.bindVertexArray(vao);
	const positionLocation = gl.getAttribLocation(this._program, 'a_position');
	gl.enableVertexAttribArray(positionLocation);
	// 3 position floats, stride of 5 floats (4 bytes each).
	gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, false, 5 * 4, 0);
	const texCoordLocation = gl.getAttribLocation(this._program, 'a_texCoord');
	gl.enableVertexAttribArray(texCoordLocation);
	// 2 texcoord floats, offset past the 3 position floats.
	gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 5 * 4, 3 * 4);
	return vao;
}
private render() {
const gl = this._context.gl;
gl.viewport(0, 0, this._canvas.width, this._canvas.height);
@ -111,5 +206,14 @@ class SlideRenderer {
gl.bindVertexArray(this._vao);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
for (var video of this._videos) {
gl.bindVertexArray(video.vao);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
this.updateTexture(video.texture, video.videoElement);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
}
requestAnimationFrame(this.render.bind(this));
}
}

View file

@ -14,12 +14,22 @@
declare var SlideShow: any;
// Metadata for a single video placed on a slide, as delivered in the
// presentationInfo JSON from core.
interface VideoInfo {
id: number;
// Public URL the video element is pointed at (see setupVideo).
url: string;
// Bounds of the video on the slide; presumably in the same units as
// docWidth/docHeight used by setupVideoPosition — TODO confirm.
x: number;
y: number;
width: number;
height: number;
}
interface SlideInfo {
hash: string;
index: number;
empty: boolean;
masterPage: string;
masterPageObjectsVisibility: boolean;
videos: Array<VideoInfo>;
transitionDuration: number;
nextSlideDuration: number;
transitionDirection: boolean;
@ -165,7 +175,7 @@ class SlideShowPresenter {
transitionParameters.next = nextTexture;
transitionParameters.slideInfo = slideInfo;
transitionParameters.callback = () => {
this._slideRenderer.renderFrame(nextTexture);
this._slideRenderer.renderSlide(nextTexture, slideInfo, this._presentationInfo.docWidth, this._presentationInfo.docHeight);
};
SlideShow.PerformTransition(transitionParameters);
@ -182,7 +192,8 @@ class SlideShowPresenter {
this._slideCompositor.fetchAndRun(this._currentSlide, () => {
const slideImage = this._slideCompositor.getSlide(this._currentSlide);
const currentTexture = this._slideRenderer.createTexture(slideImage);
this._slideRenderer.renderFrame(currentTexture);
const slideInfo = this.getSlideInfo(this._currentSlide);
this._slideRenderer.renderSlide(currentTexture, slideInfo, this._presentationInfo.docWidth, this._presentationInfo.docHeight);
});
}