ProjectedMaterial constructor
ProjectedMaterial({Camera? camera, required Texture texture, double textureScale = 1, Vector2? textureOffset, double backgroundOpacity = 1, bool cover = false, Map<String, dynamic>? options})
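Creates a material that projects texture from camera onto whatever mesh it is assigned to, over an optional base color supplied through options. The options map is forwarded to the underlying material via super.fromMap; backgroundOpacity only takes effect when 'transparent': true is included there. Below is a minimal construction sketch; the import path and the TextureLoader.fromAsset call are assumptions about the surrounding three_js Dart port and may need adjusting for your version.

// NOTE: the import path and loader API below are assumptions; adjust to your setup.
import 'package:three_js/three_js.dart';

Future<Mesh> addProjectedBox(Scene scene) async {
  final camera = PerspectiveCamera(45, 16 / 9, 0.01, 100);
  camera.position.z = 3;

  // Load the texture to project (loader call assumed; use your port's loader API).
  final texture = await TextureLoader().fromAsset('assets/uv_grid.jpg');

  final material = ProjectedMaterial(
    camera: camera,
    texture: texture!,
    textureScale: 0.8,
    cover: true,
    backgroundOpacity: 0.5,
    // 'transparent': true is required for backgroundOpacity to take effect
    options: {'color': 0xcccccc, 'transparent': true},
  );

  final mesh = Mesh(BoxGeometry(1, 1, 1), material);
  scene.add(mesh);
  return mesh;
}

The upstream three-projected-material library freezes the projection onto a mesh with a project(mesh) helper; this constructor only sets up the uniforms and shader patches, so check the rest of the class for the equivalent step in this port.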
Implementation
ProjectedMaterial({
Camera? camera,
required Texture texture,
double textureScale = 1,
Vector2? textureOffset,
double backgroundOpacity = 1,
bool cover = false,
Map<String, dynamic>? options
}) : super.fromMap(options) {
if (backgroundOpacity < 1 && options?['transparent'] != true) {
console.warning('You have to pass "transparent: true" to the ProjectedMaterial for the backgroundOpacity option to work');
}
//Object.defineProperty(this, 'isProjectedMaterial', { 'value': true });
// save the private variables
camera ??= PerspectiveCamera();
_camera = camera;
_cover = cover;
_textureScale = textureScale;
textureOffset ??= Vector2();
// scale to keep the image proportions and apply textureScale
final [widthScaled, heightScaled] = ProjectedMaterialUtils.computeScaledDimensions(
texture,
camera,
textureScale,
cover
);
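// Uniforms consumed by the patched vertex and fragment shaders below.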
this.uniforms = {
'projectedTexture': { 'value': texture },
'isTextureLoaded': { 'value': texture.image != null},
'isTextureProjected': { 'value': false },
'backgroundOpacity': { 'value': backgroundOpacity },
'viewMatrixCamera': { 'value': Matrix4.identity() },
'projectionMatrixCamera': { 'value': Matrix4.identity() },
'projPosition': { 'value': Vector3() },
'projDirection': { 'value': Vector3(0, 0, -1) },
'savedModelMatrix': { 'value': Matrix4.identity() },
'widthScaled': { 'value': widthScaled },
'heightScaled': { 'value': heightScaled },
'textureOffset': { 'value': textureOffset },
};
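// Patch the standard shader at compile time: inject the projector
// uniforms/varyings and replace the diffuse color computation with the
// projected-texture lookup.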
this.onBeforeCompile = (shader, t){
// merge the built-in shader uniforms into the material's uniforms
// and make the shader use the shared map
this.uniforms.addAll(shader.uniforms);
shader.uniforms = this.uniforms;
if (camera is OrthographicCamera) {
shader.defines?['ORTHOGRAPHIC'] = '';
}
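// Vertex shader: compute the projector-space texture coordinates and the
// saved normal (plus the world position for perspective projection).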
shader.vertexShader = ProjectedMaterialUtils.monkeyPatch(shader.vertexShader,
header: '''
uniform mat4 viewMatrixCamera;
uniform mat4 projectionMatrixCamera;
#ifdef USE_INSTANCING
attribute vec4 savedModelMatrix0;
attribute vec4 savedModelMatrix1;
attribute vec4 savedModelMatrix2;
attribute vec4 savedModelMatrix3;
#else
uniform mat4 savedModelMatrix;
#endif
varying vec3 vSavedNormal;
varying vec4 vTexCoords;
#ifndef ORTHOGRAPHIC
varying vec4 vWorldPosition;
#endif
''',
main: '''
#ifdef USE_INSTANCING
mat4 savedModelMatrix = mat4(
savedModelMatrix0,
savedModelMatrix1,
savedModelMatrix2,
savedModelMatrix3
);
#endif
vSavedNormal = mat3(savedModelMatrix) * normal;
vTexCoords = projectionMatrixCamera * viewMatrixCamera * savedModelMatrix * vec4(position, 1.0);
#ifndef ORTHOGRAPHIC
vWorldPosition = savedModelMatrix * vec4(position, 1.0);
#endif
'''
);
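// Fragment shader: sample the projected texture only where the surface
// faces the projector and the projected UVs fall inside the texture;
// everywhere else fall back to the base diffuse color.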
shader.fragmentShader = ProjectedMaterialUtils.monkeyPatch(shader.fragmentShader,
header: '''
uniform sampler2D projectedTexture;
uniform bool isTextureLoaded;
uniform bool isTextureProjected;
uniform float backgroundOpacity;
uniform vec3 projPosition;
uniform vec3 projDirection;
uniform float widthScaled;
uniform float heightScaled;
uniform vec2 textureOffset;
varying vec3 vSavedNormal;
varying vec4 vTexCoords;
#ifndef ORTHOGRAPHIC
varying vec4 vWorldPosition;
#endif
float mapRange(float value, float min1, float max1, float min2, float max2) {
return min2 + (value - min1) * (max2 - min2) / (max1 - min1);
}
''',
replaces: {
'vec4 diffuseColor = vec4( diffuse, opacity );': '''
// clamp the w to make sure we don't project behind
float w = max(vTexCoords.w, 0.0);
vec2 uv = (vTexCoords.xy / w) * 0.5 + 0.5;
uv += textureOffset;
// apply the corrected width and height
uv.x = mapRange(uv.x, 0.0, 1.0, 0.5 - widthScaled / 2.0, 0.5 + widthScaled / 2.0);
uv.y = mapRange(uv.y, 0.0, 1.0, 0.5 - heightScaled / 2.0, 0.5 + heightScaled / 2.0);
// this makes sure we don't sample out of the texture
bool isInTexture = (max(uv.x, uv.y) <= 1.0 && min(uv.x, uv.y) >= 0.0);
// this makes sure we also don't project onto the back of the object
#ifdef ORTHOGRAPHIC
vec3 projectorDirection = projDirection;
#else
vec3 projectorDirection = normalize(projPosition - vWorldPosition.xyz);
#endif
float dotProduct = dot(vSavedNormal, projectorDirection);
bool isFacingProjector = dotProduct > 0.0000001;
vec4 diffuseColor = vec4(diffuse, opacity * backgroundOpacity);
if (isFacingProjector && isInTexture && isTextureLoaded && isTextureProjected) {
vec4 textureColor = texture2D(projectedTexture, uv);
// apply the material opacity
textureColor.a *= opacity;
// https://learnopengl.com/Advanced-OpenGL/Blending
diffuseColor = textureColor * textureColor.a + diffuseColor * (1.0 - textureColor.a);
}
'''
}
);
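// keep a reference to the compiled shader so it can be reached later via userData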
userData['shader'] = shader;
};
// Listen for window resize in case the camera used for the
// projection is the same one used to render.
// We listen on the window because there is no way to
// listen for a resize of the renderer itself.
addEventListener('resize', _saveCameraProjectionMatrix);
// If the image texture passed hasn't loaded yet,
// wait for it to load and compute the correct proportions.
// This avoids rendering black while the texture is loading
// ProjectedMaterialUtils.addLoadListener(texture, (t){
// this.uniforms['isTextureLoaded']['value'] = true;
// this.dispatchEvent(Event( type: 'textureload'));
// _saveDimensions();
// });
}