first commit

This commit is contained in:
rafaeldpsilva
2025-12-10 12:32:12 +00:00
commit adbbf6bf50
3442 changed files with 2725681 additions and 0 deletions


@@ -0,0 +1,54 @@
import { Fn } from '../tsl/TSLBase.js';
import { mix, min, step } from '../math/MathNode.js';
export const burn = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {
return min( 1.0, base.oneMinus().div( blend ) ).oneMinus();
} ).setLayout( {
name: 'burnBlend',
type: 'vec3',
inputs: [
{ name: 'base', type: 'vec3' },
{ name: 'blend', type: 'vec3' }
]
} );
export const dodge = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {
return min( base.div( blend.oneMinus() ), 1.0 );
} ).setLayout( {
name: 'dodgeBlend',
type: 'vec3',
inputs: [
{ name: 'base', type: 'vec3' },
{ name: 'blend', type: 'vec3' }
]
} );
export const screen = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {
return base.oneMinus().mul( blend.oneMinus() ).oneMinus();
} ).setLayout( {
name: 'screenBlend',
type: 'vec3',
inputs: [
{ name: 'base', type: 'vec3' },
{ name: 'blend', type: 'vec3' }
]
} );
export const overlay = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {
return mix( base.mul( 2.0 ).mul( blend ), base.oneMinus().mul( 2.0 ).mul( blend.oneMinus() ).oneMinus(), step( 0.5, base ) );
} ).setLayout( {
name: 'overlayBlend',
type: 'vec3',
inputs: [
{ name: 'base', type: 'vec3' },
{ name: 'blend', type: 'vec3' }
]
} );
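// Usage sketch: each export above is a TSL function taking ( base, blend ) vec3 nodes and
// returning a vec3 node, so the result can drive a node material directly. The import paths,
// texture URL and material class below are assumptions and not part of this commit.
import { TextureLoader } from 'three';
import { texture, vec3 } from 'three/tsl';
import { MeshBasicNodeMaterial } from 'three/webgpu';
import { overlay } from './BlendModes.js';
const baseMap = new TextureLoader().load( 'textures/diffuse.jpg' ); // hypothetical asset
const material = new MeshBasicNodeMaterial();
material.colorNode = overlay( texture( baseMap ).rgb, vec3( 0.8, 0.6, 0.4 ) ); // blend a warm tint over the base color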


@@ -0,0 +1,81 @@
import TempNode from '../core/TempNode.js';
import { uv } from '../accessors/UV.js';
import { normalView } from '../accessors/Normal.js';
import { positionView } from '../accessors/Position.js';
import { faceDirection } from './FrontFacingNode.js';
import { Fn, nodeProxy, float, vec2 } from '../tsl/TSLBase.js';
// Bump Mapping Unparametrized Surfaces on the GPU by Morten S. Mikkelsen
// https://mmikk.github.io/papers3d/mm_sfgrad_bump.pdf
const dHdxy_fwd = Fn( ( { textureNode, bumpScale } ) => {
// The cached context below is used to preserve the same TextureNode instance across the three height samples
const sampleTexture = ( callback ) => textureNode.cache().context( { getUV: ( texNode ) => callback( texNode.uvNode || uv() ), forceUVContext: true } );
const Hll = float( sampleTexture( ( uvNode ) => uvNode ) );
return vec2(
float( sampleTexture( ( uvNode ) => uvNode.add( uvNode.dFdx() ) ) ).sub( Hll ),
float( sampleTexture( ( uvNode ) => uvNode.add( uvNode.dFdy() ) ) ).sub( Hll )
).mul( bumpScale );
} );
// dHdxy_fwd above evaluates the derivative of the height w.r.t. screen space using forward differencing (listing 2);
// perturbNormalArb below applies the resulting surface gradient to perturb the surface normal
const perturbNormalArb = Fn( ( inputs ) => {
const { surf_pos, surf_norm, dHdxy } = inputs;
// normalize is done to ensure that the bump map looks the same regardless of the texture's scale
const vSigmaX = surf_pos.dFdx().normalize();
const vSigmaY = surf_pos.dFdy().normalize();
const vN = surf_norm; // normalized
const R1 = vSigmaY.cross( vN );
const R2 = vN.cross( vSigmaX );
const fDet = vSigmaX.dot( R1 ).mul( faceDirection );
const vGrad = fDet.sign().mul( dHdxy.x.mul( R1 ).add( dHdxy.y.mul( R2 ) ) );
return fDet.abs().mul( surf_norm ).sub( vGrad ).normalize();
} );
class BumpMapNode extends TempNode {
static get type() {
return 'BumpMapNode';
}
constructor( textureNode, scaleNode = null ) {
super( 'vec3' );
this.textureNode = textureNode;
this.scaleNode = scaleNode;
}
setup() {
const bumpScale = this.scaleNode !== null ? this.scaleNode : 1;
const dHdxy = dHdxy_fwd( { textureNode: this.textureNode, bumpScale } );
return perturbNormalArb( {
surf_pos: positionView,
surf_norm: normalView,
dHdxy
} );
}
}
export default BumpMapNode;
export const bumpMap = /*@__PURE__*/ nodeProxy( BumpMapNode );
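// Usage sketch (import paths and asset name are assumptions): pass the height texture as a
// texture node plus an optional scale; the node returns the perturbed view-space normal,
// which can be assigned to a node material's normalNode.
import { TextureLoader } from 'three';
import { texture } from 'three/tsl';
import { MeshStandardNodeMaterial } from 'three/webgpu';
import { bumpMap } from './BumpMapNode.js';
const heightMap = new TextureLoader().load( 'textures/height.jpg' ); // hypothetical asset
const material = new MeshStandardNodeMaterial();
material.normalNode = bumpMap( texture( heightMap ), 0.5 ); // 0.5 acts like Material.bumpScale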


@@ -0,0 +1,95 @@
import { dot, max, mix } from '../math/MathNode.js';
import { add } from '../math/OperatorNode.js';
import { Fn, If, float, vec3, vec4 } from '../tsl/TSLBase.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { Vector3 } from '../../math/Vector3.js';
import { LinearSRGBColorSpace } from '../../constants.js';
export const grayscale = /*@__PURE__*/ Fn( ( [ color ] ) => {
return luminance( color.rgb );
} );
export const saturation = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {
return adjustment.mix( luminance( color.rgb ), color.rgb );
} );
export const vibrance = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {
const average = add( color.r, color.g, color.b ).div( 3.0 );
const mx = color.r.max( color.g.max( color.b ) );
const amt = mx.sub( average ).mul( adjustment ).mul( - 3.0 );
return mix( color.rgb, mx, amt );
} );
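// Hue rotation below is Rodrigues' rotation of the color vector about the neutral grey axis
// k = normalize( vec3( 1 ) ) = vec3( 0.57735 ), by `adjustment` radians.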
export const hue = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {
const k = vec3( 0.57735, 0.57735, 0.57735 );
const cosAngle = adjustment.cos();
return vec3( color.rgb.mul( cosAngle ).add( k.cross( color.rgb ).mul( adjustment.sin() ).add( k.mul( dot( k, color.rgb ).mul( cosAngle.oneMinus() ) ) ) ) );
} );
export const luminance = (
color,
luminanceCoefficients = vec3( ColorManagement.getLuminanceCoefficients( new Vector3() ) )
) => dot( color, luminanceCoefficients );
export const threshold = ( color, threshold ) => mix( vec3( 0.0 ), color, luminance( color ).sub( threshold ).max( 0 ) );
/**
* Color Decision List (CDL) v1.2
*
* Compact representation of color grading information, defined by slope, offset, power, and
* saturation. The CDL should typically be given input in a log space (such as LogC, ACEScc,
* or AgX Log), and will return output in the same space. Output may require clamping >=0.
*
* @param {vec4} color Input (-Infinity < input < +Infinity)
* @param {number | vec3} slope Slope (0 ≤ slope < +Infinity)
* @param {number | vec3} offset Offset (-Infinity < offset < +Infinity; typically -1 < offset < 1)
* @param {number | vec3} power Power (0 < power < +Infinity)
* @param {number} saturation Saturation (0 ≤ saturation < +Infinity; typically 0 ≤ saturation < 4)
* @param {vec3} luminanceCoefficients Luminance coefficients for saturation term, typically Rec. 709
* @return Output, -Infinity < output < +Infinity
*
* References:
* - ASC CDL v1.2
* - https://blender.stackexchange.com/a/55239/43930
* - https://docs.acescentral.com/specifications/acescc/
*/
export const cdl = /*@__PURE__*/ Fn( ( [
color,
slope = vec3( 1 ),
offset = vec3( 0 ),
power = vec3( 1 ),
saturation = float( 1 ),
// ASC CDL v1.2 explicitly requires Rec. 709 luminance coefficients.
luminanceCoefficients = vec3( ColorManagement.getLuminanceCoefficients( new Vector3(), LinearSRGBColorSpace ) )
] ) => {
// NOTE: The ASC CDL v1.2 defines a [0, 1] clamp on the slope+offset term, and another on the
// saturation term. Per the ACEScc specification and Filament, limits may be omitted to support
// values outside [0, 1], requiring a workaround for negative values in the power expression.
const luma = color.rgb.dot( vec3( luminanceCoefficients ) );
const v = max( color.rgb.mul( slope ).add( offset ), 0.0 ).toVar();
const pv = v.pow( power ).toVar();
If( v.r.greaterThan( 0.0 ), () => { v.r.assign( pv.r ); } ); // eslint-disable-line
If( v.g.greaterThan( 0.0 ), () => { v.g.assign( pv.g ); } ); // eslint-disable-line
If( v.b.greaterThan( 0.0 ), () => { v.b.assign( pv.b ); } ); // eslint-disable-line
v.assign( luma.add( v.sub( luma ).mul( saturation ) ) );
return vec4( v.rgb, color.a );
} );
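// Usage sketch (import path assumed): CDL grading of a log-encoded color. A constant vec4
// stands in for a real input node such as an ACEScc-encoded pass.
import { vec3, vec4 } from 'three/tsl';
import { cdl } from './ColorAdjustment.js';
const logColor = vec4( 0.45, 0.40, 0.35, 1.0 ); // placeholder input
const graded = cdl( logColor, vec3( 1.05 ), vec3( 0.02 ), vec3( 0.95 ), 0.9 ); // slope, offset, power, saturation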


@@ -0,0 +1,38 @@
import { mix } from '../math/MathNode.js';
import { Fn } from '../tsl/TSLCore.js';
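// Piecewise sRGB transfer functions. EOTF (decode): linear = ( ( srgb + 0.055 ) / 1.055 ) ^ 2.4 for
// srgb > 0.04045, otherwise srgb / 12.92; the constants 0.9478672986 and 0.0521327014 below are
// 1 / 1.055 and 0.055 / 1.055, and 0.0773993808 is 1 / 12.92. The OETF is the inverse encoding,
// with 0.41666 approximating 1 / 2.4.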
export const sRGBTransferEOTF = /*@__PURE__*/ Fn( ( [ color ] ) => {
const a = color.mul( 0.9478672986 ).add( 0.0521327014 ).pow( 2.4 );
const b = color.mul( 0.0773993808 );
const factor = color.lessThanEqual( 0.04045 );
const rgbResult = mix( a, b, factor );
return rgbResult;
} ).setLayout( {
name: 'sRGBTransferEOTF',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' }
]
} );
export const sRGBTransferOETF = /*@__PURE__*/ Fn( ( [ color ] ) => {
const a = color.pow( 0.41666 ).mul( 1.055 ).sub( 0.055 );
const b = color.mul( 12.92 );
const factor = color.lessThanEqual( 0.0031308 );
const rgbResult = mix( a, b, factor );
return rgbResult;
} ).setLayout( {
name: 'sRGBTransferOETF',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' }
]
} );


@@ -0,0 +1,102 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, mat3, nodeObject, vec4 } from '../tsl/TSLCore.js';
import { SRGBTransfer } from '../../constants.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { sRGBTransferEOTF, sRGBTransferOETF } from './ColorSpaceFunctions.js';
import { Matrix3 } from '../../math/Matrix3.js';
const WORKING_COLOR_SPACE = 'WorkingColorSpace';
const OUTPUT_COLOR_SPACE = 'OutputColorSpace';
class ColorSpaceNode extends TempNode {
static get type() {
return 'ColorSpaceNode';
}
constructor( colorNode, source, target ) {
super( 'vec4' );
this.colorNode = colorNode;
this.source = source;
this.target = target;
}
resolveColorSpace( builder, colorSpace ) {
if ( colorSpace === WORKING_COLOR_SPACE ) {
return ColorManagement.workingColorSpace;
} else if ( colorSpace === OUTPUT_COLOR_SPACE ) {
return builder.context.outputColorSpace || builder.renderer.outputColorSpace;
}
return colorSpace;
}
setup( builder ) {
const { colorNode } = this;
const source = this.resolveColorSpace( builder, this.source );
const target = this.resolveColorSpace( builder, this.target );
let outputNode = colorNode;
if ( ColorManagement.enabled === false || source === target || ! source || ! target ) {
return outputNode;
}
if ( ColorManagement.getTransfer( source ) === SRGBTransfer ) {
outputNode = vec4( sRGBTransferEOTF( outputNode.rgb ), outputNode.a );
}
if ( ColorManagement.getPrimaries( source ) !== ColorManagement.getPrimaries( target ) ) {
outputNode = vec4(
mat3( ColorManagement._getMatrix( new Matrix3(), source, target ) ).mul( outputNode.rgb ),
outputNode.a
);
}
if ( ColorManagement.getTransfer( target ) === SRGBTransfer ) {
outputNode = vec4( sRGBTransferOETF( outputNode.rgb ), outputNode.a );
}
return outputNode;
}
}
export default ColorSpaceNode;
export const toOutputColorSpace = ( node ) => nodeObject( new ColorSpaceNode( nodeObject( node ), WORKING_COLOR_SPACE, OUTPUT_COLOR_SPACE ) );
export const toWorkingColorSpace = ( node ) => nodeObject( new ColorSpaceNode( nodeObject( node ), OUTPUT_COLOR_SPACE, WORKING_COLOR_SPACE ) );
export const workingToColorSpace = ( node, colorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), WORKING_COLOR_SPACE, colorSpace ) );
export const colorSpaceToWorking = ( node, colorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), colorSpace, WORKING_COLOR_SPACE ) );
export const convertColorSpace = ( node, sourceColorSpace, targetColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), sourceColorSpace, targetColorSpace ) );
addMethodChaining( 'toOutputColorSpace', toOutputColorSpace );
addMethodChaining( 'toWorkingColorSpace', toWorkingColorSpace );
addMethodChaining( 'workingToColorSpace', workingToColorSpace );
addMethodChaining( 'colorSpaceToWorking', colorSpaceToWorking );
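// Usage sketch (import path assumed): explicit conversion between two known color spaces. Per
// setup() above, this applies the sRGB EOTF and would also apply a primaries matrix if the
// source and target primaries differed.
import { vec4 } from 'three/tsl';
import { SRGBColorSpace, LinearSRGBColorSpace } from 'three';
import { convertColorSpace } from './ColorSpaceNode.js';
const encoded = vec4( 0.5, 0.25, 0.1, 1.0 ); // sRGB-encoded value
const linear = convertColorSpace( encoded, SRGBColorSpace, LinearSRGBColorSpace );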


@@ -0,0 +1,45 @@
import Node from '../core/Node.js';
import { nodeImmutable, float } from '../tsl/TSLBase.js';
import { BackSide, WebGLCoordinateSystem } from '../../constants.js';
class FrontFacingNode extends Node {
static get type() {
return 'FrontFacingNode';
}
constructor() {
super( 'bool' );
this.isFrontFacingNode = true;
}
generate( builder ) {
const { renderer, material } = builder;
if ( renderer.coordinateSystem === WebGLCoordinateSystem ) {
if ( material.side === BackSide ) {
return 'false';
}
}
return builder.getFrontFacing();
}
}
export default FrontFacingNode;
export const frontFacing = /*@__PURE__*/ nodeImmutable( FrontFacingNode );
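// faceDirection maps the boolean to a signed float: +1.0 for front-facing fragments, -1.0 for back-facing ones.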
export const faceDirection = /*@__PURE__*/ float( frontFacing ).mul( 2.0 ).sub( 1.0 );


@@ -0,0 +1,106 @@
import TempNode from '../core/TempNode.js';
import { add } from '../math/OperatorNode.js';
import { normalView, transformNormalToView } from '../accessors/Normal.js';
import { positionView } from '../accessors/Position.js';
import { TBNViewMatrix } from '../accessors/AccessorsUtils.js';
import { uv } from '../accessors/UV.js';
import { faceDirection } from './FrontFacingNode.js';
import { Fn, nodeProxy, vec3 } from '../tsl/TSLBase.js';
import { TangentSpaceNormalMap, ObjectSpaceNormalMap } from '../../constants.js';
// Normal Mapping Without Precomputed Tangents
// http://www.thetenthplanet.de/archives/1180
const perturbNormal2Arb = /*@__PURE__*/ Fn( ( inputs ) => {
const { eye_pos, surf_norm, mapN, uv } = inputs;
const q0 = eye_pos.dFdx();
const q1 = eye_pos.dFdy();
const st0 = uv.dFdx();
const st1 = uv.dFdy();
const N = surf_norm; // normalized
const q1perp = q1.cross( N );
const q0perp = N.cross( q0 );
const T = q1perp.mul( st0.x ).add( q0perp.mul( st1.x ) );
const B = q1perp.mul( st0.y ).add( q0perp.mul( st1.y ) );
const det = T.dot( T ).max( B.dot( B ) );
const scale = faceDirection.mul( det.inverseSqrt() );
return add( T.mul( mapN.x, scale ), B.mul( mapN.y, scale ), N.mul( mapN.z ) ).normalize();
} );
class NormalMapNode extends TempNode {
static get type() {
return 'NormalMapNode';
}
constructor( node, scaleNode = null ) {
super( 'vec3' );
this.node = node;
this.scaleNode = scaleNode;
this.normalMapType = TangentSpaceNormalMap;
}
setup( builder ) {
const { normalMapType, scaleNode } = this;
let normalMap = this.node.mul( 2.0 ).sub( 1.0 );
if ( scaleNode !== null ) {
normalMap = vec3( normalMap.xy.mul( scaleNode ), normalMap.z );
}
let outputNode = null;
if ( normalMapType === ObjectSpaceNormalMap ) {
outputNode = transformNormalToView( normalMap );
} else if ( normalMapType === TangentSpaceNormalMap ) {
const tangent = builder.hasGeometryAttribute( 'tangent' );
if ( tangent === true ) {
outputNode = TBNViewMatrix.mul( normalMap ).normalize();
} else {
outputNode = perturbNormal2Arb( {
eye_pos: positionView,
surf_norm: normalView,
mapN: normalMap,
uv: uv()
} );
}
}
return outputNode;
}
}
export default NormalMapNode;
export const normalMap = /*@__PURE__*/ nodeProxy( NormalMapNode );
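// Usage mirrors bumpMap above, e.g. material.normalNode = normalMap( texture( normalTex ), vec2( 1, 1 ) )
// (names assumed); the optional scale is applied to the XY channels only, matching Material.normalScale.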


@@ -0,0 +1,380 @@
import TempNode from '../core/TempNode.js';
import { default as TextureNode/*, texture*/ } from '../accessors/TextureNode.js';
import { NodeUpdateType } from '../core/constants.js';
import { nodeObject } from '../tsl/TSLBase.js';
import { uniform } from '../core/UniformNode.js';
import { viewZToOrthographicDepth, perspectiveDepthToViewZ } from './ViewportDepthNode.js';
import { HalfFloatType/*, FloatType*/ } from '../../constants.js';
import { Vector2 } from '../../math/Vector2.js';
import { DepthTexture } from '../../textures/DepthTexture.js';
import { RenderTarget } from '../../core/RenderTarget.js';
const _size = /*@__PURE__*/ new Vector2();
class PassTextureNode extends TextureNode {
static get type() {
return 'PassTextureNode';
}
constructor( passNode, texture ) {
super( texture );
this.passNode = passNode;
this.setUpdateMatrix( false );
}
setup( builder ) {
if ( builder.object.isQuadMesh ) this.passNode.build( builder );
return super.setup( builder );
}
clone() {
return new this.constructor( this.passNode, this.value );
}
}
class PassMultipleTextureNode extends PassTextureNode {
static get type() {
return 'PassMultipleTextureNode';
}
constructor( passNode, textureName, previousTexture = false ) {
super( passNode, null );
this.textureName = textureName;
this.previousTexture = previousTexture;
}
updateTexture() {
this.value = this.previousTexture ? this.passNode.getPreviousTexture( this.textureName ) : this.passNode.getTexture( this.textureName );
}
setup( builder ) {
this.updateTexture();
return super.setup( builder );
}
clone() {
return new this.constructor( this.passNode, this.textureName, this.previousTexture );
}
}
class PassNode extends TempNode {
static get type() {
return 'PassNode';
}
constructor( scope, scene, camera, options = {} ) {
super( 'vec4' );
this.scope = scope;
this.scene = scene;
this.camera = camera;
this.options = options;
this._pixelRatio = 1;
this._width = 1;
this._height = 1;
const depthTexture = new DepthTexture();
depthTexture.isRenderTargetTexture = true;
//depthTexture.type = FloatType;
depthTexture.name = 'depth';
const renderTarget = new RenderTarget( this._width * this._pixelRatio, this._height * this._pixelRatio, { type: HalfFloatType, ...options, } );
renderTarget.texture.name = 'output';
renderTarget.depthTexture = depthTexture;
this.renderTarget = renderTarget;
this.updateBeforeType = NodeUpdateType.FRAME;
this._textures = {
output: renderTarget.texture,
depth: depthTexture
};
this._textureNodes = {};
this._linearDepthNodes = {};
this._viewZNodes = {};
this._previousTextures = {};
this._previousTextureNodes = {};
this._cameraNear = uniform( 0 );
this._cameraFar = uniform( 0 );
this._mrt = null;
this.isPassNode = true;
}
setMRT( mrt ) {
this._mrt = mrt;
return this;
}
getMRT() {
return this._mrt;
}
isGlobal() {
return true;
}
getTexture( name ) {
let texture = this._textures[ name ];
if ( texture === undefined ) {
const refTexture = this.renderTarget.texture;
texture = refTexture.clone();
texture.isRenderTargetTexture = true;
texture.name = name;
this._textures[ name ] = texture;
this.renderTarget.textures.push( texture );
}
return texture;
}
getPreviousTexture( name ) {
let texture = this._previousTextures[ name ];
if ( texture === undefined ) {
texture = this.getTexture( name ).clone();
texture.isRenderTargetTexture = true;
this._previousTextures[ name ] = texture;
}
return texture;
}
toggleTexture( name ) {
const prevTexture = this._previousTextures[ name ];
if ( prevTexture !== undefined ) {
const texture = this._textures[ name ];
const index = this.renderTarget.textures.indexOf( texture );
this.renderTarget.textures[ index ] = prevTexture;
this._textures[ name ] = prevTexture;
this._previousTextures[ name ] = texture;
this._textureNodes[ name ].updateTexture();
this._previousTextureNodes[ name ].updateTexture();
}
}
getTextureNode( name = 'output' ) {
let textureNode = this._textureNodes[ name ];
if ( textureNode === undefined ) {
textureNode = nodeObject( new PassMultipleTextureNode( this, name ) );
textureNode.updateTexture();
this._textureNodes[ name ] = textureNode;
}
return textureNode;
}
getPreviousTextureNode( name = 'output' ) {
let textureNode = this._previousTextureNodes[ name ];
if ( textureNode === undefined ) {
if ( this._textureNodes[ name ] === undefined ) this.getTextureNode( name );
textureNode = nodeObject( new PassMultipleTextureNode( this, name, true ) );
textureNode.updateTexture();
this._previousTextureNodes[ name ] = textureNode;
}
return textureNode;
}
getViewZNode( name = 'depth' ) {
let viewZNode = this._viewZNodes[ name ];
if ( viewZNode === undefined ) {
const cameraNear = this._cameraNear;
const cameraFar = this._cameraFar;
this._viewZNodes[ name ] = viewZNode = perspectiveDepthToViewZ( this.getTextureNode( name ), cameraNear, cameraFar );
}
return viewZNode;
}
getLinearDepthNode( name = 'depth' ) {
let linearDepthNode = this._linearDepthNodes[ name ];
if ( linearDepthNode === undefined ) {
const cameraNear = this._cameraNear;
const cameraFar = this._cameraFar;
const viewZNode = this.getViewZNode( name );
// TODO: just if ( builder.camera.isPerspectiveCamera )
this._linearDepthNodes[ name ] = linearDepthNode = viewZToOrthographicDepth( viewZNode, cameraNear, cameraFar );
}
return linearDepthNode;
}
setup( { renderer } ) {
this.renderTarget.samples = this.options.samples === undefined ? renderer.samples : this.options.samples;
// Disable MSAA for WebGL backend for now
if ( renderer.backend.isWebGLBackend === true ) {
this.renderTarget.samples = 0;
}
this.renderTarget.depthTexture.isMultisampleRenderTargetTexture = this.renderTarget.samples > 1;
return this.scope === PassNode.COLOR ? this.getTextureNode() : this.getLinearDepthNode();
}
updateBefore( frame ) {
const { renderer } = frame;
const { scene, camera } = this;
this._pixelRatio = renderer.getPixelRatio();
const size = renderer.getSize( _size );
this.setSize( size.width, size.height );
const currentRenderTarget = renderer.getRenderTarget();
const currentMRT = renderer.getMRT();
this._cameraNear.value = camera.near;
this._cameraFar.value = camera.far;
for ( const name in this._previousTextures ) {
this.toggleTexture( name );
}
renderer.setRenderTarget( this.renderTarget );
renderer.setMRT( this._mrt );
renderer.render( scene, camera );
renderer.setRenderTarget( currentRenderTarget );
renderer.setMRT( currentMRT );
}
setSize( width, height ) {
this._width = width;
this._height = height;
const effectiveWidth = this._width * this._pixelRatio;
const effectiveHeight = this._height * this._pixelRatio;
this.renderTarget.setSize( effectiveWidth, effectiveHeight );
}
setPixelRatio( pixelRatio ) {
this._pixelRatio = pixelRatio;
this.setSize( this._width, this._height );
}
dispose() {
this.renderTarget.dispose();
}
}
PassNode.COLOR = 'color';
PassNode.DEPTH = 'depth';
export default PassNode;
export const pass = ( scene, camera, options ) => nodeObject( new PassNode( PassNode.COLOR, scene, camera, options ) );
export const passTexture = ( pass, texture ) => nodeObject( new PassTextureNode( pass, texture ) );
export const depthPass = ( scene, camera ) => nodeObject( new PassNode( PassNode.DEPTH, scene, camera ) );
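// Usage sketch, following the three.js post-processing examples; the renderer/scene/camera setup
// and the PostProcessing import are assumptions not shown in this commit.
import { PostProcessing } from 'three/webgpu';
import { pass } from './PassNode.js';
const scenePass = pass( scene, camera );                 // renders the scene into the internal render target
const sceneColor = scenePass.getTextureNode( 'output' ); // color attachment as a texture node
const sceneDepth = scenePass.getLinearDepthNode();       // depth remapped to [ 0, 1 ]
const postProcessing = new PostProcessing( renderer );
postProcessing.outputNode = sceneColor; // insert effects here, e.g. sceneColor.mul( sceneDepth.oneMinus() )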


@@ -0,0 +1,33 @@
import TempNode from '../core/TempNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
class PosterizeNode extends TempNode {
static get type() {
return 'PosterizeNode';
}
constructor( sourceNode, stepsNode ) {
super();
this.sourceNode = sourceNode;
this.stepsNode = stepsNode;
}
setup() {
const { sourceNode, stepsNode } = this;
return sourceNode.mul( stepsNode ).floor().div( stepsNode );
}
}
export default PosterizeNode;
export const posterize = /*@__PURE__*/ nodeProxy( PosterizeNode );
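// setup() computes floor( source * steps ) / steps, quantizing the source into discrete bands,
// e.g. a cel-shading look via material.colorNode = posterize( texture( map ), 6.0 ) (names assumed).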


@@ -0,0 +1,60 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, nodeObject } from '../tsl/TSLCore.js';
import { NoColorSpace, NoToneMapping } from '../../constants.js';
import { ColorManagement } from '../../math/ColorManagement.js';
class RenderOutputNode extends TempNode {
static get type() {
return 'RenderOutputNode';
}
constructor( colorNode, toneMapping, outputColorSpace ) {
super( 'vec4' );
this.colorNode = colorNode;
this.toneMapping = toneMapping;
this.outputColorSpace = outputColorSpace;
this.isRenderOutput = true;
}
setup( { context } ) {
let outputNode = this.colorNode || context.color;
// tone mapping
const toneMapping = ( this.toneMapping !== null ? this.toneMapping : context.toneMapping ) || NoToneMapping;
const outputColorSpace = ( this.outputColorSpace !== null ? this.outputColorSpace : context.outputColorSpace ) || NoColorSpace;
if ( toneMapping !== NoToneMapping ) {
outputNode = outputNode.toneMapping( toneMapping );
}
// working to output color space
if ( outputColorSpace !== NoColorSpace && outputColorSpace !== ColorManagement.workingColorSpace ) {
outputNode = outputNode.workingToColorSpace( outputColorSpace );
}
return outputNode;
}
}
export default RenderOutputNode;
export const renderOutput = ( color, toneMapping = null, outputColorSpace = null ) => nodeObject( new RenderOutputNode( nodeObject( color ), toneMapping, outputColorSpace ) );
addMethodChaining( 'renderOutput', renderOutput );
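// Usage sketch: chaining .renderOutput() onto a final color node applies the configured tone
// mapping followed by the working-to-output color-space conversion, e.g.
// postProcessing.outputNode = scenePassColor.renderOutput(); (names assumed).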


@@ -0,0 +1,181 @@
import Node from '../core/Node.js';
import { NodeUpdateType } from '../core/constants.js';
import { uniform } from '../core/UniformNode.js';
import { Fn, nodeImmutable, vec2 } from '../tsl/TSLBase.js';
import { Vector2 } from '../../math/Vector2.js';
import { Vector4 } from '../../math/Vector4.js';
let screenSizeVec, viewportVec;
class ScreenNode extends Node {
static get type() {
return 'ScreenNode';
}
constructor( scope ) {
super();
this.scope = scope;
this.isViewportNode = true;
}
getNodeType() {
if ( this.scope === ScreenNode.VIEWPORT ) return 'vec4';
else return 'vec2';
}
getUpdateType() {
let updateType = NodeUpdateType.NONE;
if ( this.scope === ScreenNode.SIZE || this.scope === ScreenNode.VIEWPORT ) {
updateType = NodeUpdateType.RENDER;
}
this.updateType = updateType;
return updateType;
}
update( { renderer } ) {
const renderTarget = renderer.getRenderTarget();
if ( this.scope === ScreenNode.VIEWPORT ) {
if ( renderTarget !== null ) {
viewportVec.copy( renderTarget.viewport );
} else {
renderer.getViewport( viewportVec );
viewportVec.multiplyScalar( renderer.getPixelRatio() );
}
} else {
if ( renderTarget !== null ) {
screenSizeVec.width = renderTarget.width;
screenSizeVec.height = renderTarget.height;
} else {
renderer.getDrawingBufferSize( screenSizeVec );
}
}
}
setup( /*builder*/ ) {
const scope = this.scope;
let output = null;
if ( scope === ScreenNode.SIZE ) {
output = uniform( screenSizeVec || ( screenSizeVec = new Vector2() ) );
} else if ( scope === ScreenNode.VIEWPORT ) {
output = uniform( viewportVec || ( viewportVec = new Vector4() ) );
} else {
output = vec2( screenCoordinate.div( screenSize ) );
}
return output;
}
generate( builder ) {
if ( this.scope === ScreenNode.COORDINATE ) {
let coord = builder.getFragCoord();
if ( builder.isFlipY() ) {
// follow WebGPU standards
const size = builder.getNodeProperties( screenSize ).outputNode.build( builder );
coord = `${ builder.getType( 'vec2' ) }( ${ coord }.x, ${ size }.y - ${ coord }.y )`;
}
return coord;
}
return super.generate( builder );
}
}
ScreenNode.COORDINATE = 'coordinate';
ScreenNode.VIEWPORT = 'viewport';
ScreenNode.SIZE = 'size';
ScreenNode.UV = 'uv';
export default ScreenNode;
// Screen
export const screenUV = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.UV );
export const screenSize = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.SIZE );
export const screenCoordinate = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.COORDINATE );
// Viewport
export const viewport = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.VIEWPORT );
export const viewportSize = viewport.zw;
export const viewportCoordinate = /*@__PURE__*/ screenCoordinate.sub( viewport.xy );
export const viewportUV = /*@__PURE__*/ viewportCoordinate.div( viewportSize );
// Deprecated
export const viewportResolution = /*@__PURE__*/ ( Fn( () => { // @deprecated, r169
console.warn( 'TSL.ViewportNode: "viewportResolution" is deprecated. Use "screenSize" instead.' );
return screenSize;
}, 'vec2' ).once() )();
export const viewportTopLeft = /*@__PURE__*/ ( Fn( () => { // @deprecated, r168
console.warn( 'TSL.ViewportNode: "viewportTopLeft" is deprecated. Use "screenUV" instead.' );
return screenUV;
}, 'vec2' ).once() )();
export const viewportBottomLeft = /*@__PURE__*/ ( Fn( () => { // @deprecated, r168
console.warn( 'TSL.ViewportNode: "viewportBottomLeft" is deprecated. Use "screenUV.flipY()" instead.' );
return screenUV.flipY();
}, 'vec2' ).once() )();
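// Usage sketch: screenUV is a normalized [ 0, 1 ] coordinate over the current output (drawing
// buffer or active render target), while viewportUV is relative to the active viewport. A quick
// visualization (material name assumed): material.colorNode = vec4( screenUV, 0.0, 1.0 );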


@@ -0,0 +1,190 @@
import { Fn, float, mat3, vec3, If } from '../tsl/TSLBase.js';
import { select } from '../math/ConditionalNode.js';
import { clamp, log2, max, min, pow, mix } from '../math/MathNode.js';
import { mul, sub, div } from '../math/OperatorNode.js';
// exposure only
export const linearToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
return color.mul( exposure ).clamp();
} ).setLayout( {
name: 'linearToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
// source: https://www.cs.utah.edu/docs/techreports/2002/pdf/UUCS-02-001.pdf
export const reinhardToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
color = color.mul( exposure );
return color.div( color.add( 1.0 ) ).clamp();
} ).setLayout( {
name: 'reinhardToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
// source: http://filmicworlds.com/blog/filmic-tonemapping-operators/
export const cineonToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
// filmic operator by Jim Hejl and Richard Burgess-Dawson
color = color.mul( exposure );
color = color.sub( 0.004 ).max( 0.0 );
const a = color.mul( color.mul( 6.2 ).add( 0.5 ) );
const b = color.mul( color.mul( 6.2 ).add( 1.7 ) ).add( 0.06 );
return a.div( b ).pow( 2.2 );
} ).setLayout( {
name: 'cineonToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
// source: https://github.com/selfshadow/ltc_code/blob/master/webgl/shaders/ltc/ltc_blit.fs
const RRTAndODTFit = /*@__PURE__*/ Fn( ( [ color ] ) => {
const a = color.mul( color.add( 0.0245786 ) ).sub( 0.000090537 );
const b = color.mul( color.add( 0.4329510 ).mul( 0.983729 ) ).add( 0.238081 );
return a.div( b );
} );
// source: https://github.com/selfshadow/ltc_code/blob/master/webgl/shaders/ltc/ltc_blit.fs
export const acesFilmicToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
// sRGB => XYZ => D65_2_D60 => AP1 => RRT_SAT
const ACESInputMat = mat3(
0.59719, 0.35458, 0.04823,
0.07600, 0.90834, 0.01566,
0.02840, 0.13383, 0.83777
);
// ODT_SAT => XYZ => D60_2_D65 => sRGB
const ACESOutputMat = mat3(
1.60475, - 0.53108, - 0.07367,
- 0.10208, 1.10813, - 0.00605,
- 0.00327, - 0.07276, 1.07602
);
color = color.mul( exposure ).div( 0.6 );
color = ACESInputMat.mul( color );
// Apply RRT and ODT
color = RRTAndODTFit( color );
color = ACESOutputMat.mul( color );
// Clamp to [0, 1]
return color.clamp();
} ).setLayout( {
name: 'acesFilmicToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
const LINEAR_REC2020_TO_LINEAR_SRGB = /*@__PURE__*/ mat3( vec3( 1.6605, - 0.1246, - 0.0182 ), vec3( - 0.5876, 1.1329, - 0.1006 ), vec3( - 0.0728, - 0.0083, 1.1187 ) );
const LINEAR_SRGB_TO_LINEAR_REC2020 = /*@__PURE__*/ mat3( vec3( 0.6274, 0.0691, 0.0164 ), vec3( 0.3293, 0.9195, 0.0880 ), vec3( 0.0433, 0.0113, 0.8956 ) );
const agxDefaultContrastApprox = /*@__PURE__*/ Fn( ( [ x_immutable ] ) => {
const x = vec3( x_immutable ).toVar();
const x2 = vec3( x.mul( x ) ).toVar();
const x4 = vec3( x2.mul( x2 ) ).toVar();
return float( 15.5 ).mul( x4.mul( x2 ) ).sub( mul( 40.14, x4.mul( x ) ) ).add( mul( 31.96, x4 ).sub( mul( 6.868, x2.mul( x ) ) ).add( mul( 0.4298, x2 ).add( mul( 0.1191, x ).sub( 0.00232 ) ) ) );
} );
export const agxToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
const colortone = vec3( color ).toVar();
const AgXInsetMatrix = mat3( vec3( 0.856627153315983, 0.137318972929847, 0.11189821299995 ), vec3( 0.0951212405381588, 0.761241990602591, 0.0767994186031903 ), vec3( 0.0482516061458583, 0.101439036467562, 0.811302368396859 ) );
const AgXOutsetMatrix = mat3( vec3( 1.1271005818144368, - 0.1413297634984383, - 0.14132976349843826 ), vec3( - 0.11060664309660323, 1.157823702216272, - 0.11060664309660294 ), vec3( - 0.016493938717834573, - 0.016493938717834257, 1.2519364065950405 ) );
const AgxMinEv = float( - 12.47393 );
const AgxMaxEv = float( 4.026069 );
colortone.mulAssign( exposure );
colortone.assign( LINEAR_SRGB_TO_LINEAR_REC2020.mul( colortone ) );
colortone.assign( AgXInsetMatrix.mul( colortone ) );
colortone.assign( max( colortone, 1e-10 ) );
colortone.assign( log2( colortone ) );
colortone.assign( colortone.sub( AgxMinEv ).div( AgxMaxEv.sub( AgxMinEv ) ) );
colortone.assign( clamp( colortone, 0.0, 1.0 ) );
colortone.assign( agxDefaultContrastApprox( colortone ) );
colortone.assign( AgXOutsetMatrix.mul( colortone ) );
colortone.assign( pow( max( vec3( 0.0 ), colortone ), vec3( 2.2 ) ) );
colortone.assign( LINEAR_REC2020_TO_LINEAR_SRGB.mul( colortone ) );
colortone.assign( clamp( colortone, 0.0, 1.0 ) );
return colortone;
} ).setLayout( {
name: 'agxToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
// https://modelviewer.dev/examples/tone-mapping
export const neutralToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
const StartCompression = float( 0.8 - 0.04 );
const Desaturation = float( 0.15 );
color = color.mul( exposure );
const x = min( color.r, min( color.g, color.b ) );
const offset = select( x.lessThan( 0.08 ), x.sub( mul( 6.25, x.mul( x ) ) ), 0.04 );
color.subAssign( offset );
const peak = max( color.r, max( color.g, color.b ) );
If( peak.lessThan( StartCompression ), () => {
return color;
} );
const d = sub( 1, StartCompression );
const newPeak = sub( 1, d.mul( d ).div( peak.add( d.sub( StartCompression ) ) ) );
color.mulAssign( newPeak.div( peak ) );
const g = sub( 1, div( 1, Desaturation.mul( peak.sub( newPeak ) ).add( 1 ) ) );
return mix( color, vec3( newPeak ), g );
} ).setLayout( {
name: 'neutralToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );


@@ -0,0 +1,67 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, nodeObject, vec4 } from '../tsl/TSLCore.js';
import { rendererReference } from '../accessors/RendererReferenceNode.js';
import { NoToneMapping } from '../../constants.js';
import { hash } from '../core/NodeUtils.js';
class ToneMappingNode extends TempNode {
static get type() {
return 'ToneMappingNode';
}
constructor( toneMapping, exposureNode = toneMappingExposure, colorNode = null ) {
super( 'vec3' );
this.toneMapping = toneMapping;
this.exposureNode = exposureNode;
this.colorNode = colorNode;
}
getCacheKey() {
return hash( super.getCacheKey(), this.toneMapping );
}
setup( builder ) {
const colorNode = this.colorNode || builder.context.color;
const toneMapping = this.toneMapping;
if ( toneMapping === NoToneMapping ) return colorNode;
let outputNode = null;
const toneMappingFn = builder.renderer.library.getToneMappingFunction( toneMapping );
if ( toneMappingFn !== null ) {
outputNode = vec4( toneMappingFn( colorNode.rgb, this.exposureNode ), colorNode.a );
} else {
console.error( 'ToneMappingNode: Unsupported Tone Mapping configuration.', toneMapping );
outputNode = colorNode;
}
return outputNode;
}
}
export default ToneMappingNode;
export const toneMapping = ( mapping, exposure, color ) => nodeObject( new ToneMappingNode( mapping, nodeObject( exposure ), nodeObject( color ) ) );
export const toneMappingExposure = /*@__PURE__*/ rendererReference( 'toneMappingExposure', 'float' );
addMethodChaining( 'toneMapping', ( color, mapping, exposure ) => toneMapping( mapping, exposure, color ) );
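// Usage sketch: the chaining registered above lets any color node opt into a specific operator,
// e.g. color.toneMapping( ACESFilmicToneMapping ) or color.toneMapping( AgXToneMapping, exposureNode );
// when no exposure is passed, the renderer's toneMappingExposure uniform is used (constants from 'three', names assumed).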


@@ -0,0 +1,111 @@
import { float, nodeObject, normalize, vec4 } from '../tsl/TSLBase.js';
import { Color } from '../../math/Color.js';
import NodeMaterial from '../../materials/nodes/NodeMaterial.js';
import { cameraProjectionMatrix } from '../../nodes/accessors/Camera.js';
import { modelViewMatrix } from '../../nodes/accessors/ModelNode.js';
import { positionLocal } from '../../nodes/accessors/Position.js';
import { normalLocal } from '../../nodes/accessors/Normal.js';
import { BackSide } from '../../constants.js';
import PassNode from './PassNode.js';
class ToonOutlinePassNode extends PassNode {
static get type() {
return 'ToonOutlinePassNode';
}
constructor( scene, camera, colorNode, thicknessNode, alphaNode ) {
super( PassNode.COLOR, scene, camera );
this.colorNode = colorNode;
this.thicknessNode = thicknessNode;
this.alphaNode = alphaNode;
this._materialCache = new WeakMap();
}
updateBefore( frame ) {
const { renderer } = frame;
const currentRenderObjectFunction = renderer.getRenderObjectFunction();
renderer.setRenderObjectFunction( ( object, scene, camera, geometry, material, group, lightsNode ) => {
// only render outline for supported materials
if ( material.isMeshToonMaterial || material.isMeshToonNodeMaterial ) {
if ( material.wireframe === false ) {
const outlineMaterial = this._getOutlineMaterial( material );
renderer.renderObject( object, scene, camera, geometry, outlineMaterial, group, lightsNode );
}
}
// default
renderer.renderObject( object, scene, camera, geometry, material, group, lightsNode );
} );
super.updateBefore( frame );
renderer.setRenderObjectFunction( currentRenderObjectFunction );
}
_createMaterial() {
const material = new NodeMaterial();
material.isMeshToonOutlineMaterial = true;
material.name = 'Toon_Outline';
material.side = BackSide;
// vertex node
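// Inverted-hull outline: back faces are rendered slightly enlarged by offsetting each clip-space
// position along the projected normal direction; multiplying by pos.w keeps the offset roughly
// constant in screen space after the perspective divide.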
const outlineNormal = normalLocal.negate();
const mvp = cameraProjectionMatrix.mul( modelViewMatrix );
const ratio = float( 1.0 ); // TODO: support outline thickness ratio for each vertex
const pos = mvp.mul( vec4( positionLocal, 1.0 ) );
const pos2 = mvp.mul( vec4( positionLocal.add( outlineNormal ), 1.0 ) );
const norm = normalize( pos.sub( pos2 ) ); // NOTE: subtract pos2 from pos because BackSide objectNormal is negative
material.vertexNode = pos.add( norm.mul( this.thicknessNode ).mul( pos.w ).mul( ratio ) );
// color node
material.colorNode = vec4( this.colorNode, this.alphaNode );
return material;
}
_getOutlineMaterial( originalMaterial ) {
let outlineMaterial = this._materialCache.get( originalMaterial );
if ( outlineMaterial === undefined ) {
outlineMaterial = this._createMaterial();
this._materialCache.set( originalMaterial, outlineMaterial );
}
return outlineMaterial;
}
}
export default ToonOutlinePassNode;
export const toonOutlinePass = ( scene, camera, color = new Color( 0, 0, 0 ), thickness = 0.003, alpha = 1 ) => nodeObject( new ToonOutlinePassNode( scene, camera, nodeObject( color ), nodeObject( thickness ), nodeObject( alpha ) ) );


@@ -0,0 +1,157 @@
import Node from '../core/Node.js';
import { log2, nodeImmutable, nodeProxy } from '../tsl/TSLBase.js';
import { cameraNear, cameraFar } from '../accessors/Camera.js';
import { positionView } from '../accessors/Position.js';
import { viewportDepthTexture } from './ViewportDepthTextureNode.js';
class ViewportDepthNode extends Node {
static get type() {
return 'ViewportDepthNode';
}
constructor( scope, valueNode = null ) {
super( 'float' );
this.scope = scope;
this.valueNode = valueNode;
this.isViewportDepthNode = true;
}
generate( builder ) {
const { scope } = this;
if ( scope === ViewportDepthNode.DEPTH_BASE ) {
return builder.getFragDepth();
}
return super.generate( builder );
}
setup( { camera } ) {
const { scope } = this;
const value = this.valueNode;
let node = null;
if ( scope === ViewportDepthNode.DEPTH_BASE ) {
if ( value !== null ) {
node = depthBase().assign( value );
}
} else if ( scope === ViewportDepthNode.DEPTH ) {
if ( camera.isPerspectiveCamera ) {
node = viewZToPerspectiveDepth( positionView.z, cameraNear, cameraFar );
} else {
node = viewZToOrthographicDepth( positionView.z, cameraNear, cameraFar );
}
} else if ( scope === ViewportDepthNode.LINEAR_DEPTH ) {
if ( value !== null ) {
if ( camera.isPerspectiveCamera ) {
const viewZ = perspectiveDepthToViewZ( value, cameraNear, cameraFar );
node = viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
} else {
node = value;
}
} else {
node = viewZToOrthographicDepth( positionView.z, cameraNear, cameraFar );
}
}
return node;
}
}
ViewportDepthNode.DEPTH_BASE = 'depthBase';
ViewportDepthNode.DEPTH = 'depth';
ViewportDepthNode.LINEAR_DEPTH = 'linearDepth';
export default ViewportDepthNode;
// NOTE: viewZ, the z-coordinate in camera space, is negative for points in front of the camera
// -near maps to 0; -far maps to 1
export const viewZToOrthographicDepth = ( viewZ, near, far ) => viewZ.add( near ).div( near.sub( far ) );
// maps orthographic depth in [ 0, 1 ] to viewZ
export const orthographicDepthToViewZ = ( depth, near, far ) => near.sub( far ).mul( depth ).sub( near );
// NOTE: https://twitter.com/gonnavis/status/1377183786949959682
// -near maps to 0; -far maps to 1
export const viewZToPerspectiveDepth = ( viewZ, near, far ) => near.add( viewZ ).mul( far ).div( far.sub( near ).mul( viewZ ) );
// maps perspective depth in [ 0, 1 ] to viewZ
export const perspectiveDepthToViewZ = ( depth, near, far ) => near.mul( far ).div( far.sub( near ).mul( depth ).sub( far ) );
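// Worked example (near = 1, far = 100): a point at viewZ = -10 yields
// orthographic depth ( -10 + 1 ) / ( 1 - 100 ) = 1 / 11 ≈ 0.091, but
// perspective depth ( 1 - 10 ) * 100 / ( ( 100 - 1 ) * -10 ) = 10 / 11 ≈ 0.909,
// showing how perspective depth concentrates precision near the camera.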
export const perspectiveDepthToLogarithmicDepth = ( perspectiveW, near, far ) => {
// The final logarithmic depth formula used here is adapted from one described in an
// article by Thatcher Ulrich (see http://tulrich.com/geekstuff/log_depth_buffer.txt),
// which was an improvement upon an earlier formula described in an
// Outerra article (https://outerra.blogspot.com/2009/08/logarithmic-z-buffer.html).
// Ulrich's formula is the following:
// z = K * log( w / cameraNear ) / log( cameraFar / cameraNear )
// where K = 2^k - 1, and k is the number of bits in the depth buffer.
// The Outerra variant ignored the camera near plane (it assumed it was 0) and instead
// opted for a "C-constant" for resolution adjustment of objects near the camera.
// Outerra states: "Notice that the 'C' variant doesnt use a near plane distance, it has it
// set at 0" (quote from https://outerra.blogspot.com/2012/11/maximizing-depth-buffer-range-and.html).
// Ulrich's variant has the benefit of constant relative precision over the whole near-far range.
// It was debated here whether Outerra's "C-constant" or Ulrich's "near plane" variant should
// be used, and ultimately Ulrich's "near plane" version was chosen.
// Outerra eventually made another improvement to their original "C-constant" variant,
// but it still does not incorporate the camera near plane (for this version,
// see https://outerra.blogspot.com/2013/07/logarithmic-depth-buffer-optimizations.html).
// Here we make 4 changes to Ulrich's formula:
// 1. Clamp the camera near plane so we don't divide by 0.
// 2. Use log2 instead of log to avoid an extra multiply (shaders implement log using log2).
// 3. Assume K is 1 (K = maximum value in depth buffer; see Ulrich's formula above).
// 4. Add 1 to each division by cameraNear to ensure the depth curve is shifted to the left as cameraNear increases.
// For visual representation of this depth curve, see https://www.desmos.com/calculator/lz5rqfysih
near = near.max( 1e-6 ).toVar();
const numerator = log2( perspectiveW.div( near ).add( 1 ) );
const denominator = log2( far.div( near ).add( 1 ) );
return numerator.div( denominator );
};
const depthBase = /*@__PURE__*/ nodeProxy( ViewportDepthNode, ViewportDepthNode.DEPTH_BASE );
export const depth = /*@__PURE__*/ nodeImmutable( ViewportDepthNode, ViewportDepthNode.DEPTH );
export const linearDepth = /*@__PURE__*/ nodeProxy( ViewportDepthNode, ViewportDepthNode.LINEAR_DEPTH );
export const viewportLinearDepth = /*@__PURE__*/ linearDepth( viewportDepthTexture() );
depth.assign = ( value ) => depthBase( value );


@@ -0,0 +1,33 @@
import ViewportTextureNode from './ViewportTextureNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { DepthTexture } from '../../textures/DepthTexture.js';
let sharedDepthbuffer = null;
class ViewportDepthTextureNode extends ViewportTextureNode {
static get type() {
return 'ViewportDepthTextureNode';
}
constructor( uvNode = screenUV, levelNode = null ) {
if ( sharedDepthbuffer === null ) {
sharedDepthbuffer = new DepthTexture();
}
super( uvNode, levelNode, sharedDepthbuffer );
}
}
export default ViewportDepthTextureNode;
export const viewportDepthTexture = /*@__PURE__*/ nodeProxy( ViewportDepthTextureNode );


@@ -0,0 +1,39 @@
import ViewportTextureNode from './ViewportTextureNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { FramebufferTexture } from '../../textures/FramebufferTexture.js';
let _sharedFramebuffer = null;
class ViewportSharedTextureNode extends ViewportTextureNode {
static get type() {
return 'ViewportSharedTextureNode';
}
constructor( uvNode = screenUV, levelNode = null ) {
if ( _sharedFramebuffer === null ) {
_sharedFramebuffer = new FramebufferTexture();
}
super( uvNode, levelNode, _sharedFramebuffer );
}
updateReference() {
return this;
}
}
export default ViewportSharedTextureNode;
export const viewportSharedTexture = /*@__PURE__*/ nodeProxy( ViewportSharedTextureNode );


@@ -0,0 +1,81 @@
import TextureNode from '../accessors/TextureNode.js';
import { NodeUpdateType } from '../core/constants.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { Vector2 } from '../../math/Vector2.js';
import { FramebufferTexture } from '../../textures/FramebufferTexture.js';
import { LinearMipmapLinearFilter } from '../../constants.js';
const _size = /*@__PURE__*/ new Vector2();
class ViewportTextureNode extends TextureNode {
static get type() {
return 'ViewportTextureNode';
}
constructor( uvNode = screenUV, levelNode = null, framebufferTexture = null ) {
if ( framebufferTexture === null ) {
framebufferTexture = new FramebufferTexture();
framebufferTexture.minFilter = LinearMipmapLinearFilter;
}
super( framebufferTexture, uvNode, levelNode );
this.generateMipmaps = false;
this.isOutputTextureNode = true;
this.updateBeforeType = NodeUpdateType.FRAME;
}
updateBefore( frame ) {
const renderer = frame.renderer;
renderer.getDrawingBufferSize( _size );
//
const framebufferTexture = this.value;
if ( framebufferTexture.image.width !== _size.width || framebufferTexture.image.height !== _size.height ) {
framebufferTexture.image.width = _size.width;
framebufferTexture.image.height = _size.height;
framebufferTexture.needsUpdate = true;
}
//
const currentGenerateMipmaps = framebufferTexture.generateMipmaps;
framebufferTexture.generateMipmaps = this.generateMipmaps;
renderer.copyFramebufferToTexture( framebufferTexture );
framebufferTexture.generateMipmaps = currentGenerateMipmaps;
}
clone() {
const viewportTextureNode = new this.constructor( this.uvNode, this.levelNode, this.value );
viewportTextureNode.generateMipmaps = this.generateMipmaps;
return viewportTextureNode;
}
}
export default ViewportTextureNode;
export const viewportTexture = /*@__PURE__*/ nodeProxy( ViewportTextureNode );
export const viewportMipTexture = /*@__PURE__*/ nodeProxy( ViewportTextureNode, null, null, { generateMipmaps: true } );
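// Usage sketch: viewportTexture() copies the current framebuffer once per frame and samples it at
// screenUV, which is useful for backdrop/refraction-style effects; viewportMipTexture() is the same
// node with generateMipmaps enabled, so the copy can be sampled at coarser mip levels.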