ArcGIS JSAPI Advanced Tutorial: Video Fusion with a Custom RenderNode (No Third-Party Tools)
- Core Code
- Complete Code
- Live Demo
Displaying a video on the globe can be done by simply swapping a texture, but with the spread of surveillance cameras and drones, video now needs to be deeply fused with the scene, and the simple approach no longer meets that requirement.
3D video fusion combines video sources with a 3D model scene and maps the video content precisely onto the map, providing a more realistic and complete visual experience.
For a WebGL engine there are several ways to implement video fusion; the most common one is based on shadow mapping.
This article explains the principle of WebGL video fusion and how to implement it, including, most importantly, a live demo.
It covers the core code of the video fusion, the complete code, and the live demo.
Core Code
How the video fusion works:
First, a video object is created with the official VideoElement class.
Then a Camera is built from the viewpoint, tilt, and related parameters, and a Viewshed is built from the observer, near/far distance, vertical field of view, and so on.
Once the viewshed has been created, its shadow texture, shadow matrices, and related data are generated automatically.
Next, the GLSL uniforms and shader code are set up and hooked into a post-processing RenderNode.
In the shader, each fragment is tested for visibility from the viewshed: visible fragments are replaced with the video texture and occluded fragments are darkened (in the sample below they simply keep the original color), which produces the fusion effect.
See the code comments for details; a quick sketch of the pipeline follows, then the core code.
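Here is a minimal JavaScript sketch of the pipeline, only meant to show the order of the steps. `createVideoTexture` and `VideoFusionRenderNode` are placeholder names for code that appears in full further down (the full sample calls its render node `LuminanceRenderNode`); everything else comes from the sections below.
// 1. Load the video and wrap it in an engine texture (see "Core Code")
const video = await loadVideoOfficial();
const videoTexture = createVideoTexture(video);   // placeholder helper, shown in full below
// 2. Move the scene camera to the observer and create the viewshed that acts as the "video projector"
view.goTo(new Camera({ position: observer, heading, tilt }));
const viewshed = new Viewshed({ observer, farDistance, heading, tilt,
    horizontalFieldOfView, verticalFieldOfView });
view.analyses.add(new ViewshedAnalysis({ viewsheds: [viewshed] }));
// 3. In a custom RenderNode, bind the viewshed's shadow map, its matrices and the video
//    texture as uniforms, then run a full-screen pass whose fragment shader replaces
//    fragments visible from the viewshed with the video texture
const renderer_viewshed = view._stage.renderer._viewshed;
const fusionNode = new VideoFusionRenderNode({ view, renderer_viewshed });   // placeholder name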
// Load the video object through the official VideoElement class
async function loadVideoOfficial(url = "./WeChat_20240708093501.mp4") {
const videoParamTemp = new VideoElement({
video: url,
});
// The video can only be used after it has finished loading
await videoParamTemp.load();
return videoParamTemp.content;
}
// Create the video element
let video = await loadVideoOfficial();
const width = video instanceof HTMLImageElement ? video.naturalWidth : video.width,
height = video instanceof HTMLImageElement ? video.naturalHeight : video.height;
// Create the video texture
let texture = new Texture.Texture(video, {
width, height,
mipmap: true,
reloadable: true
});
// Load the texture into the rendering context
texture.load(view._stage.renderView.renderingContext);
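Note that the texture wraps a playing HTML video, so it has to be registered with the render stage and refreshed every frame. In the complete code below this is done with the following two calls (shown here out of context):
// Register the texture with the render stage (once, right after loading)
view._stage.add(texture);
// Inside the render node's render() method: pull the current video frame into the texture
texture?.frameUpdate();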
// ===================================================================
// Shader code: the main function
void main() {
    // Sample the scene color
    vec4 color = texture(colorTex, uv);
    // Sample the scene depth
    float depth = depthFromTexture(depthTex, uv);
    // Outside the camera's near/far range
    if (depth >= 1.0 || depth <= 0.0) {
        return;
    }
    // Linearize the depth
    float linearDepth = linearizeDepth(depth);
    // Reconstruct the position relative to the view (camera space)
    vec4 localPosition = reconstructLocalPosition(gl_FragCoord.xy, linearDepth);
    ViewshedPoint point;
    // Look up the viewshed point for this fragment
    bool foundFace = getViewshedPointVideo(localPosition, point);
    // Keep the original color by default
    fragColor = color;
    // Outside every viewshed face
    if (!foundFace || !point.isWithin) {
        return;
    }
    // Read the viewshed depth from the shadow map
    float viewshedDepth = getDepthFromShadowMap(point.uv, point.face);
    // Distance from the viewshed observer to this fragment
    float distance = point.orthographicDepth;
    // Visibility test
    bool visible = distance < viewshedDepth;
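    // This is the standard shadow-map comparison: the shadow map stores, per direction,
    // the depth of the closest surface seen from the viewshed observer; a fragment that is
    // closer than that stored depth is unoccluded, so the video can be projected onto it.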
    // Adjust the point's UV so the video lines up with the frustum
    // (empirical values)
    point.uv.x -= 0.0825;
    point.uv.x *= 1.2;
    // Blend the video color with the original color
    vec4 videoColor = mix(texture(videoTex, point.uv), color, 0.1);
    // Use the video texture for visible fragments, the original color otherwise
    fragColor = visible ? videoColor : color;
    // Cosine of the angle between the surface normal and the viewing direction
    float cosAngle = normalCosAngle(linearDepth, localPosition.xyz);
    // Surfaces facing away from the viewshed, or nearly parallel to it, are treated as occluded.
    // The threshold corresponds to roughly 0.6 degrees and was tuned empirically.
    if (cosAngle > -0.01) {
        fragColor = color;
    }
}
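The shader above runs inside a custom RenderNode. In the complete code the node is wired up like this (the full subclass definition follows in the next section):
// Scene renderer and the internal viewshed renderer created by the ViewshedAnalysis
const renderer = view._stage.renderer;
const renderer_viewshed = renderer._viewshed;
// Post-processing node that consumes and produces the "composite-color" target
const luminanceRenderNode = new LuminanceRenderNode({view, renderer_viewshed});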
Complete Code
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<meta name="viewport" content="initial-scale=1, maximum-scale=1,user-scalable=no"/>
<title>Custom RenderNode - Video Fusion | Sample | ArcGIS Maps SDK for JavaScript 4.30</title>
<link rel="stylesheet" href="https://js.arcgis.com/4.30/esri/themes/light/main.css"/>
<script src="https://js.arcgis.com/4.30/"></script>
<script src="./renderCommon.js"></script>
<script type="module" src="https://js.arcgis.com/calcite-components/2.5.1/calcite.esm.js"></script>
<link rel="stylesheet" type="text/css" href="https://js.arcgis.com/calcite-components/2.5.1/calcite.css"/>
<style>
html,
body,
#viewDiv {
padding: 0;
margin: 0;
height: 100%;
width: 100%;
}
</style>
</head>
<body>
<calcite-block open heading="Toggle Render Node" id="renderNodeUI">
<calcite-label layout="inline">
Disabled
<calcite-switch id="renderNodeToggle" checked></calcite-switch>
Enabled
</calcite-label>
<calcite-label layout="inline">
Play
<calcite-switch id="renderNodeToggleViewshed" checked></calcite-switch>
Pause
</calcite-label>
</calcite-block>
<script>
require(["esri/Map", "esri/views/SceneView", "esri/views/3d/webgl/RenderNode",
"esri/Graphic", "esri/views/3d/webgl",
"esri/geometry/SpatialReference",
"esri/widgets/Home",
"esri/core/libs/gl-matrix-2/math/mat4",
"esri/chunks/vec42",
"esri/core/libs/gl-matrix-2/math/vec2",
"esri/core/libs/gl-matrix-2/factories/vec2f64",
"esri/core/libs/gl-matrix-2/factories/vec4f64",
"esri/core/libs/gl-matrix-2/factories/mat4f64",
"esri/layers/IntegratedMeshLayer",
"esri/analysis/Viewshed",
"esri/views/3d/webgl-engine/lib/Viewshed",
"esri/views/3d/analysis/Viewshed/ViewshedComputedData",
"esri/analysis/ViewshedAnalysis",
"esri/Camera",
"esri/geometry/Point",
"esri/layers/WebTileLayer",
'esri/layers/support/TileInfo',
"esri/views/3d/webgl-engine/core/shaderModules/ShaderBuilder.js",
"esri/views/3d/webgl-engine/core/shaderLibrary/NormalFromDepth.glsl.js",
"esri/views/3d/webgl-engine/core/shaderLibrary/util/LocalFromScreenSpace.glsl.js",
"esri/views/3d/webgl-engine/core/shaderLibrary/util/RgbaFloat16Encoding.glsl.js",
"esri/views/3d/webgl-engine/core/shaderLibrary/util/TextureAtlasLookup.glsl.js",
"esri/views/3d/webgl-engine/shaders/ViewshedTechnique.js",
"esri/core/reactiveUtils.js",
"esri/core/Collection",
"esri/layers/support/SceneModifications",
"esri/layers/support/SceneModification",
"esri/geometry/Polygon",
"esri/views/3d/webgl-engine/lib/Texture",
"esri/views/webgl/enums",
"esri/layers/support/VideoElement",
], function (
Map,
SceneView,
RenderNode,
Graphic,
webgl,
SpatialReference,
Home,
mat4,
vec42,
vec2,
vec2f64,
vec4f64,
mat4f64,
IntegratedMeshLayer,
Viewshed,
InnerViewshed,
ViewshedComputedData,
ViewshedAnalysis,
Camera,
Point,
WebTileLayer,
TileInfo,
ShaderBuilder,
NormalFromDepth,
LocalFromScreenSpace,
RgbaFloat16Encoding,
TextureAtlasLookup,
ViewshedTechnique,
reactiveUtils,
Collection,
SceneModifications,
SceneModification,
Polygon,
Texture,
enums,
VideoElement,
) {
const view = new SceneView({
container: "viewDiv",
camera: {
position: {
spatialReference: SpatialReference.WebMercator,
x: -9753837.742627423,
y: 5140806.202422867,
z: 995.4546383377165
},
heading: 1.2311944909542853,
tilt: 70.07900968078631
},
map: new Map({
// basemap: "hybrid",
// ground: "world-elevation"
}),
environment: {}
});
const layer = new IntegratedMeshLayer({
url: "https://gs3d.geosceneonline.cn/server/rest/services/Hosted/%E9%AB%98%E6%96%B0%E4%B9%9D%E5%8F%B7/SceneServer",
// Frankfurt integrated mesh data provided by Aerowest GmbH
copyright: "Aerowest GmbH",
title: "Integrated Mesh Frankfurt"
});
view.map.add(layer);
let imLayer = layer;
updateIntegratedMesh();
// Restrict the displayed extent of the integrated mesh
function updateIntegratedMesh() {
const geom = {
"hasZ": true,
"spatialReference": {"latestWkid": 3857, "wkid": 102100},
"rings": [[[12121950.326925978, 4061211.071907152, 466.23319461848587], [12122119.455563923, 4061527.424461947, 466.23319461848587], [12122324.574477604, 4061381.2400526297, 466.23319461848587], [12122159.396676224, 4061092.9049046407, 466.23319461848587], [12121950.326925978, 4061211.071907152, 466.23319461848587]]]
}
// create the modification collection with the polygon geometry
let modifications = new SceneModifications(
[
new SceneModification({
geometry: Polygon.fromJSON(geom),
type: 'replace'
})
]
);
// add the modifications to the IntegratedMesh
imLayer.modifications = modifications;
}
// Load the video object through the official VideoElement class
async function loadVideoOfficial(url = "./WeChat_20240708093501.mp4") {
const videoParamTemp = new VideoElement({
video: url,
});
// The video can only be used after it has finished loading
await videoParamTemp.load();
return videoParamTemp.content;
}
view.when(async () => {
// Viewshed / camera parameters
const viewParams = {
    // Far distance
    "farDistance": 329.18556793671647,
    // Heading
    "heading": 211.78560080071696,
    // Horizontal field of view
    "horizontalFieldOfView": 62.96905525251295,
    // Observer (camera position)
    "observer": {
        "spatialReference": {"latestWkid": 3857, "wkid": 102100},
        "x": 12121997.388159065,
        "y": 4061165.7843687492,
        "z": 538.6676084687933
    },
    // Tilt
    "tilt": 20.67387007921511,
    // Vertical field of view
    "verticalFieldOfView": 20.072508240756747
}
// Camera used to move the view to the observer position
let cam = new Camera({
    position: new Point({
        ...viewParams.observer,
    }),
    heading: viewParams.heading,
    tilt: viewParams.tilt
});
// Fly to the camera
view.goTo(cam);
// Create the viewshed
const viewshed = new Viewshed({
...viewParams
});
// Viewshed analysis, used to render the viewshed
const viewshedAnalysis = new ViewshedAnalysis({
viewsheds: [viewshed]
});
view.analyses.add(viewshedAnalysis);
const analysisView = await view.whenAnalysisView(viewshedAnalysis);
// Get the viewshed frustum visualization and hide it
const visualization =
    analysisView._analysisVisualization._viewshedVisualizations.items[0].visualization;
visualization.visible = false;
// Get the scene renderer
const renderer = view._stage.renderer;
// Shader builder used to pull in the official GLSL snippets
const shaderBuilder = new ShaderBuilder.ShaderBuilder(),
    fragment = shaderBuilder.fragment;
shaderBuilder.include(LocalFromScreenSpace.LocalFromScreenSpace);
shaderBuilder.include(TextureAtlasLookup.TextureAtlasLookup);
fragment.include(RgbaFloat16Encoding.Rgba4FloatEncoding);
shaderBuilder.include(NormalFromDepth.NormalFromDepth);
// Create the video element
let video = await loadVideoOfficial();
const width = video instanceof HTMLImageElement ? video.naturalWidth : video.width,
height = video instanceof HTMLImageElement ? video.naturalHeight : video.height;
// Create the video texture
let texture = new Texture.Texture(video, {
    width, height,
    mipmap: true,
    reloadable: true
});
// Load the texture into the rendering context
texture.load(view._stage.renderView.renderingContext);
view._stage.add(texture);
// Post-processing render node that performs the video fusion
const LuminanceRenderNode = RenderNode.createSubclass({
constructor: function (option) {
option = {
...option
}
// consumes and produces define the location of the render node in the render pipeline
this.consumes = {required: ["composite-color"]};
this.produces = "composite-color";
// Get the internal viewshed renderer
this.renderer_viewshed = option.renderer_viewshed;
if (this.renderer_viewshed) {
// Generate the shared GLSL code
this.commonGLSL = this.commonGLSL || shaderBuilder.generate();
const renderer_viewshed = this.renderer_viewshed;
// Override the viewshed renderer's default renderNode method.
// The goal is to stop the viewshed itself from being drawn to the screen;
// only its shadow cube map is still rendered and kept for our own pass.
this.renderer_viewshed.renderNode = function (a, b, c) {
const {bindParameters: d} = a;
if (renderer_viewshed.enabled && d.depth && null != c) {
b = renderer_viewshed._setupNormals(c);
if (null == renderer_viewshed._technique
|| renderer_viewshed._configuration.useNormalMap !== b) {
renderer_viewshed._configuration.useNormalMap = b;
renderer_viewshed._technique =
renderer_viewshed._pluginContext?.techniques.acquire(
ViewshedTechnique.ViewshedTechnique,
renderer_viewshed._configuration
);
}
if (renderer_viewshed._technique?.compiled) {
for (const n of renderer_viewshed._viewsheds) {
b = a.rctx.getBoundFramebufferObject();
c = renderer_viewshed._renderViewshedShadowCubeMap(d, n);
const p = renderer_viewshed._viewshedShadowMap;
// Note: the render pass is cut short here, so the viewshed is not drawn to the screen
c && null != p.depthTexture && !p.isTextureZero && (
renderer_viewshed._setPassParameters(n)
);
}
} else {
renderer_viewshed._pluginContext?.requestRender();
}
}
};
}
},
// Ensure resources are cleaned up when render node is removed
destroy() {
this.shaderProgram && this.gl?.deleteProgram(this.shaderProgram);
this.positionBuffer && this.gl?.deleteBuffer(this.positionBuffer);
this.vao && this.gl?.deleteVertexArray(this.vao);
},
properties: {
// Define getter and setter for class member enabled
enabled: {
get: function () {
return this.produces != null;
},
set: function (value) {
// Setting produces to null disables the render node
this.produces = value ? "composite-color" : null;
this.requestRender();
}
}
},
render(inputs) {
// The field input contains all available framebuffer objects
// We need color texture from the composite render target
const input = inputs.find(({name}) => name === "composite-color");
const color = input.getTexture();
const output = this.acquireOutputFramebuffer();
const gl = this.gl;
// Clear newly acquired framebuffer
gl.clearColor(0, 0, 0, 0);
gl.colorMask(true, true, true, true);
gl.clear(gl.COLOR_BUFFER_BIT);
// Prepare custom shaders and geometry for screenspace rendering
this.ensureShader(gl);
this.ensureScreenSpacePass(gl);
// Bind custom program
gl.useProgram(this.shaderProgram);
// Use composite-color render target to be modified in the shader
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, color.glName);
gl.uniform1i(this.textureUniformLocation, 0);
// ======================================================
// Bind the video-fusion uniforms
if (this.renderer_viewshed) {
const _viewshedParam = this.renderer_viewshed._parameters;
const viewshed2ShadowMap = this.renderer_viewshed._viewshedShadowMap;
// Viewshed parameters
gl.uniform3fv(this.viewshedTargetVectorLocation, _viewshedParam.targetVector);
gl.uniform3fv(this.viewshedUpVectorLocation, _viewshedParam.upVector);
gl.uniform2fv(this.viewshedFOVsLocation, _viewshedParam.fovs);
gl.uniform2fv(this.viewshedHeadingAndTiltLocation, _viewshedParam.headingAndTilt);
// Viewshed matrices
_viewshedParam.projectionMatrices && _viewshedParam.projectionMatrices.length > 0
&& gl.uniformMatrix4fv(this.projectionMatricesLocation, false,
_viewshedParam.projectionMatrices.flat());
_viewshedParam.viewMatrices && _viewshedParam.viewMatrices.length > 0 &&
gl.uniformMatrix4fv(this.viewMatricesLocation, false,
_viewshedParam.viewMatrices.flat());
// Viewshed shadow map
if (viewshed2ShadowMap) {
// Shadow map parameters
viewshed2ShadowMap.nearFar && gl.uniform2fv(this.viewshedNearFarLocation, viewshed2ShadowMap.nearFar);
// Shadow matrices
viewshed2ShadowMap.viewshedProjectionMatrices && viewshed2ShadowMap.viewshedProjectionMatrices.length > 0
&& gl.uniformMatrix4fv(this.viewshedProjectionMatricesLocation, false,
viewshed2ShadowMap.viewshedProjectionMatrices.flat());
viewshed2ShadowMap.viewshedViewMatrices && viewshed2ShadowMap.viewshedViewMatrices.length > 0 &&
gl.uniformMatrix4fv(this.viewshedViewMatricesLocation, false,
viewshed2ShadowMap.viewshedViewMatrices.flat());
viewshed2ShadowMap.numActiveFaces &&
gl.uniform1i(this.viewshedNumFacesLocation, viewshed2ShadowMap.numActiveFaces);
viewshed2ShadowMap.atlasRegions && viewshed2ShadowMap.atlasRegions.length > 0 && gl.uniform1fv(this.viewshedAtlasRegionsLocation,
viewshed2ShadowMap.atlasRegions.flat());
}
// The shadow texture goes on texture unit 1
gl.activeTexture(gl.TEXTURE1)
gl.bindTexture(gl.TEXTURE_2D, viewshed2ShadowMap.depthTexture?.glName);
gl.uniform1i(this.textureSamplerColor, 1);
// Inverse view matrix
let programUniformInverseViewMatrix = gl.getUniformLocation(
this.shaderProgram,
'inverseViewMatrix'
);
// Invert the view matrix translated to the viewshed's local origin
let inverseMatrix = mat4f64.create();
mat4.translate(inverseMatrix, this.camera.viewMatrix, _viewshedParam.localOrigin);
mat4.invert(inverseMatrix, inverseMatrix);
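// The resulting matrix presumably feeds the reconstructLocalPosition() helper included from
// LocalFromScreenSpace: it takes camera-space positions (reconstructed from the depth buffer
// in the shader) back to coordinates relative to the viewshed's local origin, the frame the
// viewshed view/projection matrices expect.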
gl.uniformMatrix4fv(
programUniformInverseViewMatrix,
false,
// this.camera.projectionMatrix
inverseMatrix
);
}
// ======================================================
// Texture unit 2: the scene depth texture
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, renderer._bindParameters.depth?.attachment?.glName);
gl.uniform1i(this.depthTexUniformLocation, 2);
// Video texture: pull the current video frame into the texture every frame
texture?.frameUpdate();
// Texture unit 3: the video texture
gl.activeTexture(gl.TEXTURE3);
// Bind the video texture object to the texture unit
gl.bindTexture(gl.TEXTURE_2D, texture.glTexture?.glName)
// The video texture sampler uses texture unit 3
gl.uniform1i(this.textureSamplerVideo, 3);
// Normal matrix: inverse of the camera's view inverse-transpose matrix
let programUniformMatrix = gl.getUniformLocation(
this.shaderProgram,
'u_inverseViewNormalMatrix'
);
const temp = this.camera.viewInverseTransposeMatrix;
gl.uniformMatrix4fv(
programUniformMatrix,
false,
mat4.invertOrIdentity(mat4f64.create(), temp)
);
// Helpers for reconstructing camera-space positions from the depth buffer
const l = vec4f64.create(),
f = vec2f64.create();
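// projInfo() packs four values, derived from the projection matrix and viewport size,
// that the included GLSL helpers use to unproject a pixel back to camera space; the
// first branch (x[11] == 0) covers orthographic projections, the second perspective ones.
// zScale() selects the depth convention (orthographic vs. perspective) for the same helpers.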
function projInfo(r) {
const x = r.projectionMatrix;
return 0 === x[11] ?
vec42.set(l, 2 / (r.fullWidth * x[0]), 2 / (r.fullHeight * x[5]), (1 + x[12]) / x[0], (1 + x[13]) / x[5])
: vec42.set(l, -2 / (r.fullWidth * x[0]), -2 / (r.fullHeight * x[5]), (1 - x[8]) / x[0], (1 - x[9]) / x[5])
}
function zScale(r) {
return 0 === r.projectionMatrix[11] ? vec2.set(f, 0, 1) : vec2.set(f, 1, 0)
}
// Projection info
let programUniformCameraProjection = gl.getUniformLocation(
this.shaderProgram,
'projInfo'
);
gl.uniform4fv(
programUniformCameraProjection,
projInfo(this.camera)
);
// z scale
let programUniformCameraZScale = gl.getUniformLocation(
this.shaderProgram,
'zScale'
);
gl.uniform2fv(
programUniformCameraZScale,
zScale(this.camera)
);
// z projection map: the projection-matrix terms used to linearize depth
let programUniformCameraZProjectionMap = gl.getUniformLocation(
this.shaderProgram,
'zProjectionMap'
);
const projectionMatrix = this.camera.projectionMatrix;
gl.uniform2fv(
programUniformCameraZProjectionMap,
[projectionMatrix[14], projectionMatrix[10]]
);
// Issue the render call for a screen space render pass
gl.bindVertexArray(this.vao);
gl.drawArrays(gl.TRIANGLES, 0, 3);
// use depth from input on output framebuffer
output.attachDepth(input.getAttachment(gl.DEPTH_STENCIL_ATTACHMENT));
// Keep requesting frames so the video texture keeps updating
this.requestRender();
return output;
},
shaderProgram: null,
textureUniformLocation: null,
positionLocation: null,
vao: null,
positionBuffer: null,
// used to avoid allocating objects in each frame.
// Setup screen space filling triangle
ensureScreenSpacePass(gl) {
if (this.vao) {
return;
}
this.vao = gl.createVertexArray();
gl.bindVertexArray(this.vao);
this.positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
const vertices = new Float32Array([-1.0, -1.0, 3.0, -1.0, -1.0, 3.0]);
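// A single oversized triangle with vertices (-1,-1), (3,-1) and (-1,3) covers the whole
// clip-space square, so one draw call fills the screen without the diagonal seam of a
// two-triangle quad.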
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.vertexAttribPointer(this.positionLocation, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(this.positionLocation);
gl.bindVertexArray(null);
},
// Setup custom shader programs
ensureShader(gl) {
if (this.shaderProgram != null) {
return;
}
// The vertex shader program
// Sets position from 0..1 for fragment shader
// Forwards texture coordinates to fragment shader
const vshader = `#version 300 es
in vec2 position;
out vec2 uv;
void main() {
uv = position * 0.5 + vec2(0.5);
gl_Position = vec4(position, 0.0, 1.0);
}`;
// The fragment shader program performing the video fusion
const fshader = this.commonGLSL + `
// Scene color
uniform sampler2D colorTex;
// Video texture
uniform sampler2D videoTex;
in vec2 uv;
// Viewshed shadow map (depth atlas)
uniform sampler2D viewshedShadowMap;
// Scene depth texture
uniform sampler2D depthTex;
// Inverse of the view inverse-transpose (normal) matrix
uniform mat4 u_inverseViewNormalMatrix;
// Viewshed parameters
uniform vec3 viewshedTargetVector;
uniform vec3 viewshedUpVector;
uniform vec2 viewshedFOVs;
uniform vec2 viewshedHeadingAndTilt;
uniform vec2 viewshedNearFar;
// Viewshed matrices
uniform mat4[6] viewshedProjectionMatrices;
uniform mat4[6] viewshedViewMatrices;
uniform mat4[6] projectionMatrices;
uniform mat4[6] viewMatrices;
uniform int viewshedNumFaces;
uniform float[24] viewshedAtlasRegions;
// Viewshed UV of a given position for a given cube-map face
vec2 getViewshedUv(vec4 worldPosition, int face) {
    mat4 viewshedMatrix = viewshedProjectionMatrices[face];
    vec4 viewshedUv4 = viewshedMatrix * worldPosition;
    vec3 viewshedUv = viewshedUv4.xyz / viewshedUv4.w;
    return viewshedUv.xy;
}
// Viewshed-space coordinates of a given position for a given cube-map face
vec3 getViewshedXyz(vec4 worldPosition, int face) {
    mat4 viewshedMatrix = viewshedProjectionMatrices[face];
    vec4 viewshedUv4 = viewshedMatrix * worldPosition;
    vec3 viewshedUv = viewshedUv4.xyz / viewshedUv4.w;
    return viewshedUv.xyz;
}
float viewshedDepthToFloat(float depth) {
    return (depth - viewshedNearFar[0]) / (viewshedNearFar[1] - viewshedNearFar[0]);
}
// Orthographic depth to viewshed of given point and given cube map face in range [0, 1].
float getOrthographicDepthToViewshed(vec4 worldPosition, int face) {
mat4 viewshedViewMatrix = viewshedViewMatrices[face];
vec4 viewshedUv4 = viewshedViewMatrix * worldPosition;
vec3 viewshedUv = viewshedUv4.xyz / viewshedUv4.w;
float depth = -viewshedUv.z;
return viewshedDepthToFloat(depth);
}
// Read depth from shadow map given uv and cube map face
float getDepthFromShadowMap(vec2 uv, int face) {
int index = 4 * face;
float umin = viewshedAtlasRegions[index];
float umax = viewshedAtlasRegions[index + 1];
float vmin = viewshedAtlasRegions[index + 2];
float vmax = viewshedAtlasRegions[index + 3];
vec4 atlasRegion = vec4(umin, vmin, umax, vmax);
return rgba4ToFloat(textureAtlasLookup(viewshedShadowMap, uv, atlasRegion));
}
struct ViewshedPoint {
int face;
vec2 uv;
bool isWithin;
float orthographicDepth;
};
// Find the shadow cube-map face containing the given position and return its data
bool getViewshedPointVideo(vec4 worldPosition, out ViewshedPoint point) {
    // Unit vector along the viewshed's up direction
    vec3 nUp = normalize(viewshedUpVector);
    int i = 0; // face index (only one face is used here)
    // Project the point and check whether it falls inside the shadow-map texture
    vec2 viewshedUv = getViewshedUv(worldPosition, i);
    vec3 viewshedXyz = getViewshedXyz(worldPosition, i);
    // Inside the viewshed projection volume?
    if (!(any(lessThan(viewshedXyz.xyz, vec3(0.0)))
        || any(greaterThan(viewshedXyz.xyz, vec3(1.0))))) {
        float orthoDepth = getOrthographicDepthToViewshed(worldPosition, i);
        if (orthoDepth >= 0.0) {
            // A cube-map face was found.
            // Now check that the point is really within the viewshed,
            // not merely inside the camera frustum and not beyond the far distance.
            vec3 position = worldPosition.xyz;
            // Assume the point is within range
            bool isWithin = true;
            // Check whether it lies in the lower half of the field of view
            float t = dot(nUp, position);
            bool isBottomHalf = t > 0.0;
            vec3 nProjVector = normalize(position - t * nUp);
            if (isWithin) {
                // Angle between the target direction and the projected direction
                float angle = acos(dot(normalize(viewshedTargetVector), nProjVector));
                // Outside the field of view?
                if (angle > viewshedFOVs[0] / 2.0) {
                    isWithin = false;
                }
            }
            point = ViewshedPoint(i, viewshedUv, isWithin, orthoDepth);
            return true;
        }
    }
    // No matching cube-map face
    return false;
}
// Dot product of the surface normal and the viewing direction (cosine of the angle)
float normalCosAngle(float linearDepth, vec3 localPosition) {
    // Reconstruct the position from the pixel coordinates and the linear depth
    vec3 cameraSpacePosition = reconstructPosition(gl_FragCoord.xy, linearDepth);
    // Derive the normal from the depth texture
    vec3 normal = normalFromDepth(depthTex, cameraSpacePosition, gl_FragCoord.xy, uv);
    // Transform the normal into view space
    normal = (u_inverseViewNormalMatrix * vec4(normal, 1.0)).xyz;
    // Viewing direction
    vec3 viewingDir = normalize(localPosition);
    // Cosine of the angle between the normal and the viewing direction
    return dot(normal, viewingDir);
}
// Main function
void main() {
    // Sample the scene color
    vec4 color = texture(colorTex, uv);
    // Sample the scene depth
    float depth = depthFromTexture(depthTex, uv);
    // Outside the camera's near/far range
    if (depth >= 1.0 || depth <= 0.0) {
        return;
    }
    // Linearize the depth
    float linearDepth = linearizeDepth(depth);
    // Reconstruct the position relative to the view (camera space)
    vec4 localPosition = reconstructLocalPosition(gl_FragCoord.xy, linearDepth);
    ViewshedPoint point;
    // Look up the viewshed point for this fragment
    bool foundFace = getViewshedPointVideo(localPosition, point);
    fragColor = color;
    // Outside every viewshed face
    if (!foundFace || !point.isWithin) {
        return;
    }
    // Read the viewshed depth from the shadow map
    float viewshedDepth = getDepthFromShadowMap(point.uv, point.face);
    // Distance from the viewshed observer to this fragment
    float distance = point.orthographicDepth;
    // Visibility test (shadow-map comparison)
    bool visible = distance < viewshedDepth;
    // Adjust the point's UV (empirical values to align the video)
    point.uv.x -= 0.0825;
    point.uv.x *= 1.2;
    // Blend the video color with the original color
    vec4 videoColor = mix(texture(videoTex, point.uv), color, 0.1);
    // vec4 occludedColor = mix(vec4(0.0, 0.0, 0.0, 1.0), color, 0.7);
    fragColor = visible ? videoColor : color;
    // Cosine of the angle between the surface normal and the viewing direction
    float cosAngle = normalCosAngle(linearDepth, localPosition.xyz);
    // Surfaces facing away from the viewshed, or nearly parallel to it, are treated as occluded.
    // The threshold corresponds to roughly 0.6 degrees and was tuned empirically.
    if (cosAngle > -0.01) {
        // fragColor = videoColor;
        // fragColor = occludedColor;
        fragColor = color;
    }
}
`;
this.shaderProgram = initWebgl2Shaders(gl, vshader, fshader);
this.positionLocation = gl.getAttribLocation(this.shaderProgram, "position");
this.textureUniformLocation = gl.getUniformLocation(this.shaderProgram, "colorTex");
this.depthTexUniformLocation = gl.getUniformLocation(this.shaderProgram, "depthTex");
this.textureSamplerVideo = gl.getUniformLocation(this.shaderProgram, "videoTex");
this.textureSamplerColor = gl.getUniformLocation(this.shaderProgram, 'viewshedShadowMap');
this.viewshedTargetVectorLocation = gl.getUniformLocation(this.shaderProgram, "viewshedTargetVector");
this.viewshedUpVectorLocation = gl.getUniformLocation(this.shaderProgram, "viewshedUpVector");
this.viewshedFOVsLocation = gl.getUniformLocation(this.shaderProgram, "viewshedFOVs");
this.viewshedHeadingAndTiltLocation = gl.getUniformLocation(this.shaderProgram, "viewshedHeadingAndTilt");
this.viewshedNearFarLocation = gl.getUniformLocation(this.shaderProgram, "viewshedNearFar");
this.projectionMatricesLocation = gl.getUniformLocation(this.shaderProgram, "viewshedProjectionMatrices");
this.viewMatricesLocation = gl.getUniformLocation(this.shaderProgram, "viewshedViewMatrices");
this.viewshedProjectionMatricesLocation = gl.getUniformLocation(this.shaderProgram, "projectionMatrices");
this.viewshedViewMatricesLocation = gl.getUniformLocation(this.shaderProgram, "viewMatrices");
this.viewshedNumFacesLocation = gl.getUniformLocation(this.shaderProgram, "viewshedNumFaces");
this.viewshedAtlasRegionsLocation = gl.getUniformLocation(this.shaderProgram, "viewshedAtlasRegions");
}
});
const renderer_viewshed = renderer._viewshed;
const luminanceRenderNode = new LuminanceRenderNode({view, renderer_viewshed});
// Toggle button to enable/disable the custom render node
const renderNodeToggle = document.getElementById("renderNodeToggle");
renderNodeToggle.addEventListener("calciteSwitchChange", () => {
luminanceRenderNode.enabled = !luminanceRenderNode.enabled;
});
const renderNodeToggleViewshed = document.getElementById("renderNodeToggleViewshed");
renderNodeToggleViewshed.addEventListener("calciteSwitchChange", () => {
if (video) {
if (video.paused) {
video.play();
} else {
video.pause();
}
}
});
});
});
</script>
<div id="viewDiv"></div>
</body>
</html>
Live Demo
ArcGIS JSAPI live demo: Video Fusion (Video Projection)