/** * @license * Copyright 2010-2025 Three.js Authors * SPDX-License-Identifier: MIT */ 'use strict'; const REVISION = '175'; /** * Represents mouse buttons and interaction types in context of controls. * * @type {ConstantsMouse} * @constant */ const MOUSE = { LEFT: 0, MIDDLE: 1, RIGHT: 2, ROTATE: 0, DOLLY: 1, PAN: 2 }; /** * Represents touch interaction types in context of controls. * * @type {ConstantsTouch} * @constant */ const TOUCH = { ROTATE: 0, PAN: 1, DOLLY_PAN: 2, DOLLY_ROTATE: 3 }; /** * Disables face culling. * * @type {number} * @constant */ const CullFaceNone = 0; /** * Culls back faces. * * @type {number} * @constant */ const CullFaceBack = 1; /** * Culls front faces. * * @type {number} * @constant */ const CullFaceFront = 2; /** * Culls both front and back faces. * * @type {number} * @constant */ const CullFaceFrontBack = 3; /** * Gives unfiltered shadow maps - fastest, but lowest quality. * * @type {number} * @constant */ const BasicShadowMap = 0; /** * Filters shadow maps using the Percentage-Closer Filtering (PCF) algorithm. * * @type {number} * @constant */ const PCFShadowMap = 1; /** * Filters shadow maps using the Percentage-Closer Filtering (PCF) algorithm with * better soft shadows especially when using low-resolution shadow maps. * * @type {number} * @constant */ const PCFSoftShadowMap = 2; /** * Filters shadow maps using the Variance Shadow Map (VSM) algorithm. * When using VSMShadowMap all shadow receivers will also cast shadows. * * @type {number} * @constant */ const VSMShadowMap = 3; /** * Only front faces are rendered. * * @type {number} * @constant */ const FrontSide = 0; /** * Only back faces are rendered. * * @type {number} * @constant */ const BackSide = 1; /** * Both front and back faces are rendered. * * @type {number} * @constant */ const DoubleSide = 2; /** * No blending is performed which effectively disables * alpha transparency. * * @type {number} * @constant */ const NoBlending = 0; /** * The default blending. * * @type {number} * @constant */ const NormalBlending = 1; /** * Represents additive blending. * * @type {number} * @constant */ const AdditiveBlending = 2; /** * Represents subtractive blending. * * @type {number} * @constant */ const SubtractiveBlending = 3; /** * Represents multiply blending. * * @type {number} * @constant */ const MultiplyBlending = 4; /** * Represents custom blending. * * @type {number} * @constant */ const CustomBlending = 5; /** * A `source + destination` blending equation. * * @type {number} * @constant */ const AddEquation = 100; /** * A `source - destination` blending equation. * * @type {number} * @constant */ const SubtractEquation = 101; /** * A `destination - source` blending equation. * * @type {number} * @constant */ const ReverseSubtractEquation = 102; /** * A blend equation that uses the minimum of source and destination. * * @type {number} * @constant */ const MinEquation = 103; /** * A blend equation that uses the maximum of source and destination. * * @type {number} * @constant */ const MaxEquation = 104; /** * Multiplies all colors by `0`. * * @type {number} * @constant */ const ZeroFactor = 200; /** * Multiplies all colors by `1`. * * @type {number} * @constant */ const OneFactor = 201; /** * Multiplies all colors by the source colors. * * @type {number} * @constant */ const SrcColorFactor = 202; /** * Multiplies all colors by `1` minus each source color. * * @type {number} * @constant */ const OneMinusSrcColorFactor = 203; /** * Multiplies all colors by the source alpha value. 
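 *
 * A minimal usage sketch (assuming a `material` instance created elsewhere); these
 * factors reproduce standard alpha blending via the custom blending path:
 * ```js
 * material.blending = THREE.CustomBlending;
 * material.blendEquation = THREE.AddEquation;
 * material.blendSrc = THREE.SrcAlphaFactor;
 * material.blendDst = THREE.OneMinusSrcAlphaFactor;
 * ```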
* * @type {number} * @constant */ const SrcAlphaFactor = 204; /** * Multiplies all colors by 1 minus the source alpha value. * * @type {number} * @constant */ const OneMinusSrcAlphaFactor = 205; /** * Multiplies all colors by the destination alpha value. * * @type {number} * @constant */ const DstAlphaFactor = 206; /** * Multiplies all colors by `1` minus the destination alpha value. * * @type {number} * @constant */ const OneMinusDstAlphaFactor = 207; /** * Multiplies all colors by the destination color. * * @type {number} * @constant */ const DstColorFactor = 208; /** * Multiplies all colors by `1` minus each destination color. * * @type {number} * @constant */ const OneMinusDstColorFactor = 209; /** * Multiplies the RGB colors by the smaller of either the source alpha * value or the value of `1` minus the destination alpha value. The alpha * value is multiplied by `1`. * * @type {number} * @constant */ const SrcAlphaSaturateFactor = 210; /** * Multiplies all colors by a constant color. * * @type {number} * @constant */ const ConstantColorFactor = 211; /** * Multiplies all colors by `1` minus a constant color. * * @type {number} * @constant */ const OneMinusConstantColorFactor = 212; /** * Multiplies all colors by a constant alpha value. * * @type {number} * @constant */ const ConstantAlphaFactor = 213; /** * Multiplies all colors by 1 minus a constant alpha value. * * @type {number} * @constant */ const OneMinusConstantAlphaFactor = 214; /** * Never pass. * * @type {number} * @constant */ const NeverDepth = 0; /** * Always pass. * * @type {number} * @constant */ const AlwaysDepth = 1; /** * Pass if the incoming value is less than the depth buffer value. * * @type {number} * @constant */ const LessDepth = 2; /** * Pass if the incoming value is less than or equal to the depth buffer value. * * @type {number} * @constant */ const LessEqualDepth = 3; /** * Pass if the incoming value equals the depth buffer value. * * @type {number} * @constant */ const EqualDepth = 4; /** * Pass if the incoming value is greater than or equal to the depth buffer value. * * @type {number} * @constant */ const GreaterEqualDepth = 5; /** * Pass if the incoming value is greater than the depth buffer value. * * @type {number} * @constant */ const GreaterDepth = 6; /** * Pass if the incoming value is not equal to the depth buffer value. * * @type {number} * @constant */ const NotEqualDepth = 7; /** * Multiplies the environment map color with the surface color. * * @type {number} * @constant */ const MultiplyOperation = 0; /** * Uses reflectivity to blend between the two colors. * * @type {number} * @constant */ const MixOperation = 1; /** * Adds the two colors. * * @type {number} * @constant */ const AddOperation = 2; /** * No tone mapping is applied. * * @type {number} * @constant */ const NoToneMapping = 0; /** * Linear tone mapping. * * @type {number} * @constant */ const LinearToneMapping = 1; /** * Reinhard tone mapping. * * @type {number} * @constant */ const ReinhardToneMapping = 2; /** * Cineon tone mapping. * * @type {number} * @constant */ const CineonToneMapping = 3; /** * ACES Filmic tone mapping. * * @type {number} * @constant */ const ACESFilmicToneMapping = 4; /** * Custom tone mapping. * * Expects a custom implementation by modifying shader code of the material's fragment shader. * * @type {number} * @constant */ const CustomToneMapping = 5; /** * AgX tone mapping. * * @type {number} * @constant */ const AgXToneMapping = 6; /** * Neutral tone mapping. 
* * Implementation based on the Khronos 3D Commerce Group standard tone mapping. * * @type {number} * @constant */ const NeutralToneMapping = 7; /** * The skinned mesh shares the same world space as the skeleton. * * @type {string} * @constant */ const AttachedBindMode = 'attached'; /** * The skinned mesh does not share the same world space as the skeleton. * This is useful when a skeleton is shared across multiple skinned meshes. * * @type {string} * @constant */ const DetachedBindMode = 'detached'; /** * Maps textures using the geometry's UV coordinates. * * @type {number} * @constant */ const UVMapping = 300; /** * Reflection mapping for cube textures. * * @type {number} * @constant */ const CubeReflectionMapping = 301; /** * Refraction mapping for cube textures. * * @type {number} * @constant */ const CubeRefractionMapping = 302; /** * Reflection mapping for equirectangular textures. * * @type {number} * @constant */ const EquirectangularReflectionMapping = 303; /** * Refraction mapping for equirectangular textures. * * @type {number} * @constant */ const EquirectangularRefractionMapping = 304; /** * Reflection mapping for PMREM textures. * * @type {number} * @constant */ const CubeUVReflectionMapping = 306; /** * The texture will simply repeat to infinity. * * @type {number} * @constant */ const RepeatWrapping = 1000; /** * The last pixel of the texture stretches to the edge of the mesh. * * @type {number} * @constant */ const ClampToEdgeWrapping = 1001; /** * The texture will repeats to infinity, mirroring on each repeat. * * @type {number} * @constant */ const MirroredRepeatWrapping = 1002; /** * Returns the value of the texture element that is nearest (in Manhattan distance) * to the specified texture coordinates. * * @type {number} * @constant */ const NearestFilter = 1003; /** * Chooses the mipmap that most closely matches the size of the pixel being textured * and uses the `NearestFilter` criterion (the texel nearest to the center of the pixel) * to produce a texture value. * * @type {number} * @constant */ const NearestMipmapNearestFilter = 1004; const NearestMipMapNearestFilter = 1004; // legacy /** * Chooses the two mipmaps that most closely match the size of the pixel being textured and * uses the `NearestFilter` criterion to produce a texture value from each mipmap. * The final texture value is a weighted average of those two values. * * @type {number} * @constant */ const NearestMipmapLinearFilter = 1005; const NearestMipMapLinearFilter = 1005; // legacy /** * Returns the weighted average of the four texture elements that are closest to the specified * texture coordinates, and can include items wrapped or repeated from other parts of a texture, * depending on the values of `wrapS` and `wrapT`, and on the exact mapping. * * @type {number} * @constant */ const LinearFilter = 1006; /** * Chooses the mipmap that most closely matches the size of the pixel being textured and uses * the `LinearFilter` criterion (a weighted average of the four texels that are closest to the * center of the pixel) to produce a texture value. * * @type {number} * @constant */ const LinearMipmapNearestFilter = 1007; const LinearMipMapNearestFilter = 1007; // legacy /** * Chooses the two mipmaps that most closely match the size of the pixel being textured and uses * the `LinearFilter` criterion to produce a texture value from each mipmap. The final texture value * is a weighted average of those two values. 
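 *
 * A minimal usage sketch (assuming a `texture` instance created elsewhere), selecting
 * trilinear filtering for minification:
 * ```js
 * texture.generateMipmaps = true;
 * texture.minFilter = THREE.LinearMipmapLinearFilter;
 * texture.magFilter = THREE.LinearFilter;
 * ```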
* * @type {number} * @constant */ const LinearMipmapLinearFilter = 1008; const LinearMipMapLinearFilter = 1008; // legacy /** * An unsigned byte data type for textures. * * @type {number} * @constant */ const UnsignedByteType = 1009; /** * A byte data type for textures. * * @type {number} * @constant */ const ByteType = 1010; /** * A short data type for textures. * * @type {number} * @constant */ const ShortType = 1011; /** * An unsigned short data type for textures. * * @type {number} * @constant */ const UnsignedShortType = 1012; /** * An int data type for textures. * * @type {number} * @constant */ const IntType = 1013; /** * An unsigned int data type for textures. * * @type {number} * @constant */ const UnsignedIntType = 1014; /** * A float data type for textures. * * @type {number} * @constant */ const FloatType = 1015; /** * A half float data type for textures. * * @type {number} * @constant */ const HalfFloatType = 1016; /** * An unsigned short 4_4_4_4 (packed) data type for textures. * * @type {number} * @constant */ const UnsignedShort4444Type = 1017; /** * An unsigned short 5_5_5_1 (packed) data type for textures. * * @type {number} * @constant */ const UnsignedShort5551Type = 1018; /** * An unsigned int 24_8 data type for textures. * * @type {number} * @constant */ const UnsignedInt248Type = 1020; /** * An unsigned int 5_9_9_9 (packed) data type for textures. * * @type {number} * @constant */ const UnsignedInt5999Type = 35902; /** * Discards the red, green and blue components and reads just the alpha component. * * @type {number} * @constant */ const AlphaFormat = 1021; /** * Discards the alpha component and reads the red, green and blue component. * * @type {number} * @constant */ const RGBFormat = 1022; /** * Reads the red, green, blue and alpha components. * * @type {number} * @constant */ const RGBAFormat = 1023; /** * reads each element as a single luminance component. This is then converted to a floating point, * clamped to the range `[0,1]`, and then assembled into an RGBA element by placing the luminance value * in the red, green and blue channels, and attaching 1.0 to the alpha channel. * * @type {number} * @constant */ const LuminanceFormat = 1024; /** * Reads each element as a luminance/alpha double. The same process occurs as for the `LuminanceFormat`, * except that the alpha channel may have values other than `1.0`. * * @type {number} * @constant */ const LuminanceAlphaFormat = 1025; /** * Reads each element as a single depth value, converts it to floating point, and clamps to the range `[0,1]`. * * @type {number} * @constant */ const DepthFormat = 1026; /** * Reads each element is a pair of depth and stencil values. The depth component of the pair is interpreted as * in `DepthFormat`. The stencil component is interpreted based on the depth + stencil internal format. * * @type {number} * @constant */ const DepthStencilFormat = 1027; /** * Discards the green, blue and alpha components and reads just the red component. * * @type {number} * @constant */ const RedFormat = 1028; /** * Discards the green, blue and alpha components and reads just the red component. The texels are read as integers instead of floating point. * * @type {number} * @constant */ const RedIntegerFormat = 1029; /** * Discards the alpha, and blue components and reads the red, and green components. * * @type {number} * @constant */ const RGFormat = 1030; /** * Discards the alpha, and blue components and reads the red, and green components. The texels are read as integers instead of floating point. 
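 *
 * A minimal sketch (the `width`/`height` values and the data are placeholders) of a
 * two-channel unsigned-integer data texture; integer formats require nearest filtering:
 * ```js
 * const data = new Uint16Array( width * height * 2 );
 * const texture = new THREE.DataTexture( data, width, height, THREE.RGIntegerFormat, THREE.UnsignedShortType );
 * texture.minFilter = THREE.NearestFilter;
 * texture.magFilter = THREE.NearestFilter;
 * texture.needsUpdate = true;
 * ```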
* * @type {number} * @constant */ const RGIntegerFormat = 1031; /** * Discards the alpha component and reads the red, green and blue component. The texels are read as integers instead of floating point. * * @type {number} * @constant */ const RGBIntegerFormat = 1032; /** * Reads the red, green, blue and alpha components. The texels are read as integers instead of floating point. * * @type {number} * @constant */ const RGBAIntegerFormat = 1033; /** * A DXT1-compressed image in an RGB image format. * * @type {number} * @constant */ const RGB_S3TC_DXT1_Format = 33776; /** * A DXT1-compressed image in an RGB image format with a simple on/off alpha value. * * @type {number} * @constant */ const RGBA_S3TC_DXT1_Format = 33777; /** * A DXT3-compressed image in an RGBA image format. Compared to a 32-bit RGBA texture, it offers 4:1 compression. * * @type {number} * @constant */ const RGBA_S3TC_DXT3_Format = 33778; /** * A DXT5-compressed image in an RGBA image format. It also provides a 4:1 compression, but differs to the DXT3 * compression in how the alpha compression is done. * * @type {number} * @constant */ const RGBA_S3TC_DXT5_Format = 33779; /** * PVRTC RGB compression in 4-bit mode. One block for each 4×4 pixels. * * @type {number} * @constant */ const RGB_PVRTC_4BPPV1_Format = 35840; /** * PVRTC RGB compression in 2-bit mode. One block for each 8×4 pixels. * * @type {number} * @constant */ const RGB_PVRTC_2BPPV1_Format = 35841; /** * PVRTC RGBA compression in 4-bit mode. One block for each 4×4 pixels. * * @type {number} * @constant */ const RGBA_PVRTC_4BPPV1_Format = 35842; /** * PVRTC RGBA compression in 2-bit mode. One block for each 8×4 pixels. * * @type {number} * @constant */ const RGBA_PVRTC_2BPPV1_Format = 35843; /** * ETC1 RGB format. * * @type {number} * @constant */ const RGB_ETC1_Format = 36196; /** * ETC2 RGB format. * * @type {number} * @constant */ const RGB_ETC2_Format = 37492; /** * ETC2 RGBA format. * * @type {number} * @constant */ const RGBA_ETC2_EAC_Format = 37496; /** * ASTC RGBA 4x4 format. * * @type {number} * @constant */ const RGBA_ASTC_4x4_Format = 37808; /** * ASTC RGBA 5x4 format. * * @type {number} * @constant */ const RGBA_ASTC_5x4_Format = 37809; /** * ASTC RGBA 5x5 format. * * @type {number} * @constant */ const RGBA_ASTC_5x5_Format = 37810; /** * ASTC RGBA 6x5 format. * * @type {number} * @constant */ const RGBA_ASTC_6x5_Format = 37811; /** * ASTC RGBA 6x6 format. * * @type {number} * @constant */ const RGBA_ASTC_6x6_Format = 37812; /** * ASTC RGBA 8x5 format. * * @type {number} * @constant */ const RGBA_ASTC_8x5_Format = 37813; /** * ASTC RGBA 8x6 format. * * @type {number} * @constant */ const RGBA_ASTC_8x6_Format = 37814; /** * ASTC RGBA 8x8 format. * * @type {number} * @constant */ const RGBA_ASTC_8x8_Format = 37815; /** * ASTC RGBA 10x5 format. * * @type {number} * @constant */ const RGBA_ASTC_10x5_Format = 37816; /** * ASTC RGBA 10x6 format. * * @type {number} * @constant */ const RGBA_ASTC_10x6_Format = 37817; /** * ASTC RGBA 10x8 format. * * @type {number} * @constant */ const RGBA_ASTC_10x8_Format = 37818; /** * ASTC RGBA 10x10 format. * * @type {number} * @constant */ const RGBA_ASTC_10x10_Format = 37819; /** * ASTC RGBA 12x10 format. * * @type {number} * @constant */ const RGBA_ASTC_12x10_Format = 37820; /** * ASTC RGBA 12x12 format. * * @type {number} * @constant */ const RGBA_ASTC_12x12_Format = 37821; /** * BPTC RGBA format. * * @type {number} * @constant */ const RGBA_BPTC_Format = 36492; /** * BPTC Signed RGB format. 
* * @type {number} * @constant */ const RGB_BPTC_SIGNED_Format = 36494; /** * BPTC Unsigned RGB format. * * @type {number} * @constant */ const RGB_BPTC_UNSIGNED_Format = 36495; /** * RGTC1 Red format. * * @type {number} * @constant */ const RED_RGTC1_Format = 36283; /** * RGTC1 Signed Red format. * * @type {number} * @constant */ const SIGNED_RED_RGTC1_Format = 36284; /** * RGTC2 Red Green format. * * @type {number} * @constant */ const RED_GREEN_RGTC2_Format = 36285; /** * RGTC2 Signed Red Green format. * * @type {number} * @constant */ const SIGNED_RED_GREEN_RGTC2_Format = 36286; /** * Animations are played once. * * @type {number} * @constant */ const LoopOnce = 2200; /** * Animations are played with a chosen number of repetitions, each time jumping from * the end of the clip directly to its beginning. * * @type {number} * @constant */ const LoopRepeat = 2201; /** * Animations are played with a chosen number of repetitions, alternately playing forward * and backward. * * @type {number} * @constant */ const LoopPingPong = 2202; /** * Discrete interpolation mode for keyframe tracks. * * @type {number} * @constant */ const InterpolateDiscrete = 2300; /** * Linear interpolation mode for keyframe tracks. * * @type {number} * @constant */ const InterpolateLinear = 2301; /** * Smooth interpolation mode for keyframe tracks. * * @type {number} * @constant */ const InterpolateSmooth = 2302; /** * Zero curvature ending for animations. * * @type {number} * @constant */ const ZeroCurvatureEnding = 2400; /** * Zero slope ending for animations. * * @type {number} * @constant */ const ZeroSlopeEnding = 2401; /** * Wrap around ending for animations. * * @type {number} * @constant */ const WrapAroundEnding = 2402; /** * Default animation blend mode. * * @type {number} * @constant */ const NormalAnimationBlendMode = 2500; /** * Additive animation blend mode. Can be used to layer motions on top of * each other to build complex performances from smaller re-usable assets. * * @type {number} * @constant */ const AdditiveAnimationBlendMode = 2501; /** * For every three vertices draw a single triangle. * * @type {number} * @constant */ const TrianglesDrawMode = 0; /** * For each vertex draw a triangle from the last three vertices. * * @type {number} * @constant */ const TriangleStripDrawMode = 1; /** * For each vertex draw a triangle from the first vertex and the last two vertices. * * @type {number} * @constant */ const TriangleFanDrawMode = 2; /** * Basic depth packing. * * @type {number} * @constant */ const BasicDepthPacking = 3200; /** * A depth value is packed into 32 bit RGBA. * * @type {number} * @constant */ const RGBADepthPacking = 3201; /** * A depth value is packed into 24 bit RGB. * * @type {number} * @constant */ const RGBDepthPacking = 3202; /** * A depth value is packed into 16 bit RG. * * @type {number} * @constant */ const RGDepthPacking = 3203; /** * Normal information is relative to the underlying surface. * * @type {number} * @constant */ const TangentSpaceNormalMap = 0; /** * Normal information is relative to the object orientation. * * @type {number} * @constant */ const ObjectSpaceNormalMap = 1; // Color space string identifiers, matching CSS Color Module Level 4 and WebGPU names where available. /** * No color space. * * @type {string} * @constant */ const NoColorSpace = ''; /** * sRGB color space. * * @type {string} * @constant */ const SRGBColorSpace = 'srgb'; /** * sRGB-linear color space. 
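 *
 * A minimal usage sketch (texture names are placeholders): color textures are
 * typically tagged as sRGB, while textures holding linear color data are tagged
 * as sRGB-linear:
 * ```js
 * diffuseMap.colorSpace = THREE.SRGBColorSpace;
 * hdrTexture.colorSpace = THREE.LinearSRGBColorSpace;
 * ```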
* * @type {string} * @constant */ const LinearSRGBColorSpace = 'srgb-linear'; /** * Linear transfer function. * * @type {string} * @constant */ const LinearTransfer = 'linear'; /** * sRGB transfer function. * * @type {string} * @constant */ const SRGBTransfer = 'srgb'; /** * Sets the stencil buffer value to `0`. * * @type {number} * @constant */ const ZeroStencilOp = 0; /** * Keeps the current value. * * @type {number} * @constant */ const KeepStencilOp = 7680; /** * Sets the stencil buffer value to the specified reference value. * * @type {number} * @constant */ const ReplaceStencilOp = 7681; /** * Increments the current stencil buffer value. Clamps to the maximum representable unsigned value. * * @type {number} * @constant */ const IncrementStencilOp = 7682; /** * Decrements the current stencil buffer value. Clamps to `0`. * * @type {number} * @constant */ const DecrementStencilOp = 7683; /** * Increments the current stencil buffer value. Wraps stencil buffer value to zero when incrementing * the maximum representable unsigned value. * * @type {number} * @constant */ const IncrementWrapStencilOp = 34055; /** * Decrements the current stencil buffer value. Wraps stencil buffer value to the maximum representable * unsigned value when decrementing a stencil buffer value of `0`. * * @type {number} * @constant */ const DecrementWrapStencilOp = 34056; /** * Inverts the current stencil buffer value bitwise. * * @type {number} * @constant */ const InvertStencilOp = 5386; /** * Will never return true. * * @type {number} * @constant */ const NeverStencilFunc = 512; /** * Will return true if the stencil reference value is less than the current stencil value. * * @type {number} * @constant */ const LessStencilFunc = 513; /** * Will return true if the stencil reference value is equal to the current stencil value. * * @type {number} * @constant */ const EqualStencilFunc = 514; /** * Will return true if the stencil reference value is less than or equal to the current stencil value. * * @type {number} * @constant */ const LessEqualStencilFunc = 515; /** * Will return true if the stencil reference value is greater than the current stencil value. * * @type {number} * @constant */ const GreaterStencilFunc = 516; /** * Will return true if the stencil reference value is not equal to the current stencil value. * * @type {number} * @constant */ const NotEqualStencilFunc = 517; /** * Will return true if the stencil reference value is greater than or equal to the current stencil value. * * @type {number} * @constant */ const GreaterEqualStencilFunc = 518; /** * Will always return true. * * @type {number} * @constant */ const AlwaysStencilFunc = 519; /** * Never pass. * * @type {number} * @constant */ const NeverCompare = 512; /** * Pass if the incoming value is less than the texture value. * * @type {number} * @constant */ const LessCompare = 513; /** * Pass if the incoming value equals the texture value. * * @type {number} * @constant */ const EqualCompare = 514; /** * Pass if the incoming value is less than or equal to the texture value. * * @type {number} * @constant */ const LessEqualCompare = 515; /** * Pass if the incoming value is greater than the texture value. * * @type {number} * @constant */ const GreaterCompare = 516; /** * Pass if the incoming value is not equal to the texture value. * * @type {number} * @constant */ const NotEqualCompare = 517; /** * Pass if the incoming value is greater than or equal to the texture value. 
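 *
 * A minimal usage sketch (assuming a `depthTexture`, e.g. a `THREE.DepthTexture`,
 * created elsewhere and sampled as a comparison texture):
 * ```js
 * depthTexture.compareFunction = THREE.GreaterEqualCompare;
 * ```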
* * @type {number} * @constant */ const GreaterEqualCompare = 518; /** * Always pass. * * @type {number} * @constant */ const AlwaysCompare = 519; /** * The contents are intended to be specified once by the application, and used many * times as the source for drawing and image specification commands. * * @type {number} * @constant */ const StaticDrawUsage = 35044; /** * The contents are intended to be respecified repeatedly by the application, and * used many times as the source for drawing and image specification commands. * * @type {number} * @constant */ const DynamicDrawUsage = 35048; /** * The contents are intended to be specified once by the application, and used at most * a few times as the source for drawing and image specification commands. * * @type {number} * @constant */ const StreamDrawUsage = 35040; /** * The contents are intended to be specified once by reading data from the 3D API, and queried * many times by the application. * * @type {number} * @constant */ const StaticReadUsage = 35045; /** * The contents are intended to be respecified repeatedly by reading data from the 3D API, and queried * many times by the application. * * @type {number} * @constant */ const DynamicReadUsage = 35049; /** * The contents are intended to be specified once by reading data from the 3D API, and queried at most * a few times by the application. * * @type {number} * @constant */ const StreamReadUsage = 35041; /** * The contents are intended to be specified once by reading data from the 3D API, and used many times as * the source for WebGL drawing and image specification commands. * * @type {number} * @constant */ const StaticCopyUsage = 35046; /** * The contents are intended to be respecified repeatedly by reading data from the 3D API, and used many times * as the source for WebGL drawing and image specification commands. * * @type {number} * @constant */ const DynamicCopyUsage = 35050; /** * The contents are intended to be specified once by reading data from the 3D API, and used at most a few times * as the source for WebGL drawing and image specification commands. * * @type {number} * @constant */ const StreamCopyUsage = 35042; /** * GLSL 1 shader code. * * @type {string} * @constant */ const GLSL1 = '100'; /** * GLSL 3 shader code. * * @type {string} * @constant */ const GLSL3 = '300 es'; /** * WebGL coordinate system. * * @type {number} * @constant */ const WebGLCoordinateSystem = 2000; /** * WebGPU coordinate system. * * @type {number} * @constant */ const WebGPUCoordinateSystem = 2001; /** * Represents the different timestamp query types. * * @type {ConstantsTimestampQuery} * @constant */ const TimestampQuery = { COMPUTE: 'compute', RENDER: 'render' }; /** * This type represents mouse buttons and interaction types in context of controls. * * @typedef {Object} ConstantsMouse * @property {number} LEFT - The left mouse button. * @property {number} MIDDLE - The middle mouse button. * @property {number} RIGHT - The right mouse button. * @property {number} ROTATE - A rotate interaction. * @property {number} DOLLY - A dolly interaction. * @property {number} PAN - A pan interaction. **/ /** * This type represents touch interaction types in context of controls. * * @typedef {Object} ConstantsTouch * @property {number} ROTATE - A rotate interaction. * @property {number} PAN - A pan interaction. * @property {number} DOLLY_PAN - A dolly-pan interaction. * @property {number} DOLLY_ROTATE - A dolly-rotate interaction. **/ /** * This type represents the different timestamp query types.
* * @typedef {Object} ConstantsTimestampQuery * @property {string} COMPUTE - A `compute` timestamp query. * @property {string} RENDER - A `render` timestamp query. **/ /** * This module allows dispatching event objects on custom JavaScript objects. * * Main repository: [eventdispatcher.js]{@link https://github.com/mrdoob/eventdispatcher.js/} * * Code Example: * ```js * class Car extends EventDispatcher { * start() { * this.dispatchEvent( { type: 'start', message: 'vroom vroom!' } ); * } * } * * // Using events with the custom object * const car = new Car(); * car.addEventListener( 'start', function ( event ) { * alert( event.message ); * } ); * * car.start(); * ``` */ class EventDispatcher { /** * Adds the given event listener to the given event type. * * @param {string} type - The type of event to listen to. * @param {Function} listener - The function that gets called when the event is fired. */ addEventListener( type, listener ) { if ( this._listeners === undefined ) this._listeners = {}; const listeners = this._listeners; if ( listeners[ type ] === undefined ) { listeners[ type ] = []; } if ( listeners[ type ].indexOf( listener ) === -1 ) { listeners[ type ].push( listener ); } } /** * Returns `true` if the given event listener has been added to the given event type. * * @param {string} type - The type of event. * @param {Function} listener - The listener to check. * @return {boolean} Whether the given event listener has been added to the given event type. */ hasEventListener( type, listener ) { const listeners = this._listeners; if ( listeners === undefined ) return false; return listeners[ type ] !== undefined && listeners[ type ].indexOf( listener ) !== -1; } /** * Removes the given event listener from the given event type. * * @param {string} type - The type of event. * @param {Function} listener - The listener to remove. */ removeEventListener( type, listener ) { const listeners = this._listeners; if ( listeners === undefined ) return; const listenerArray = listeners[ type ]; if ( listenerArray !== undefined ) { const index = listenerArray.indexOf( listener ); if ( index !== -1 ) { listenerArray.splice( index, 1 ); } } } /** * Dispatches an event object. * * @param {Object} event - The event that gets fired. */ dispatchEvent( event ) { const listeners = this._listeners; if ( listeners === undefined ) return; const listenerArray = listeners[ event.type ]; if ( listenerArray !== undefined ) { event.target = this; // Make a copy, in case listeners are removed while iterating.
const array = listenerArray.slice( 0 ); for ( let i = 0, l = array.length; i < l; i ++ ) { array[ i ].call( this, event ); } event.target = null; } } } const _lut = [ '00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0a', '0b', '0c', '0d', '0e', '0f', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1a', '1b', '1c', '1d', '1e', '1f', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2a', '2b', '2c', '2d', '2e', '2f', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3a', '3b', '3c', '3d', '3e', '3f', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4a', '4b', '4c', '4d', '4e', '4f', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5a', '5b', '5c', '5d', '5e', '5f', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6a', '6b', '6c', '6d', '6e', '6f', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7a', '7b', '7c', '7d', '7e', '7f', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8a', '8b', '8c', '8d', '8e', '8f', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9a', '9b', '9c', '9d', '9e', '9f', 'a0', 'a1', 'a2', 'a3', 'a4', 'a5', 'a6', 'a7', 'a8', 'a9', 'aa', 'ab', 'ac', 'ad', 'ae', 'af', 'b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8', 'b9', 'ba', 'bb', 'bc', 'bd', 'be', 'bf', 'c0', 'c1', 'c2', 'c3', 'c4', 'c5', 'c6', 'c7', 'c8', 'c9', 'ca', 'cb', 'cc', 'cd', 'ce', 'cf', 'd0', 'd1', 'd2', 'd3', 'd4', 'd5', 'd6', 'd7', 'd8', 'd9', 'da', 'db', 'dc', 'dd', 'de', 'df', 'e0', 'e1', 'e2', 'e3', 'e4', 'e5', 'e6', 'e7', 'e8', 'e9', 'ea', 'eb', 'ec', 'ed', 'ee', 'ef', 'f0', 'f1', 'f2', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9', 'fa', 'fb', 'fc', 'fd', 'fe', 'ff' ]; let _seed = 1234567; const DEG2RAD = Math.PI / 180; const RAD2DEG = 180 / Math.PI; /** * Generate a [UUID]{@link https://en.wikipedia.org/wiki/Universally_unique_identifier} * (universally unique identifier). * * @return {string} The UUID. */ function generateUUID() { // http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/21963136#21963136 const d0 = Math.random() * 0xffffffff | 0; const d1 = Math.random() * 0xffffffff | 0; const d2 = Math.random() * 0xffffffff | 0; const d3 = Math.random() * 0xffffffff | 0; const uuid = _lut[ d0 & 0xff ] + _lut[ d0 >> 8 & 0xff ] + _lut[ d0 >> 16 & 0xff ] + _lut[ d0 >> 24 & 0xff ] + '-' + _lut[ d1 & 0xff ] + _lut[ d1 >> 8 & 0xff ] + '-' + _lut[ d1 >> 16 & 0x0f | 0x40 ] + _lut[ d1 >> 24 & 0xff ] + '-' + _lut[ d2 & 0x3f | 0x80 ] + _lut[ d2 >> 8 & 0xff ] + '-' + _lut[ d2 >> 16 & 0xff ] + _lut[ d2 >> 24 & 0xff ] + _lut[ d3 & 0xff ] + _lut[ d3 >> 8 & 0xff ] + _lut[ d3 >> 16 & 0xff ] + _lut[ d3 >> 24 & 0xff ]; // .toLowerCase() here flattens concatenated strings to save heap memory space. return uuid.toLowerCase(); } /** * Clamps the given value between min and max. * * @param {number} value - The value to clamp. * @param {number} min - The min value. * @param {number} max - The max value. * @return {number} The clamped value. */ function clamp( value, min, max ) { return Math.max( min, Math.min( max, value ) ); } /** * Computes the Euclidean modulo of the given parameters that * is `( ( n % m ) + m ) % m`. * * @param {number} n - The first parameter. * @param {number} m - The second parameter. * @return {number} The Euclidean modulo. */ function euclideanModulo( n, m ) { // https://en.wikipedia.org/wiki/Modulo_operation return ( ( n % m ) + m ) % m; } /** * Performs a linear mapping from range `` to range `` * for the given value. 
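 *
 * A minimal usage sketch, remapping a value from the range `[0, 1]` to `[0, 100]`:
 * ```js
 * const v = THREE.MathUtils.mapLinear( 0.25, 0, 1, 0, 100 ); // 25
 * ```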
* * @param {number} x - The value to be mapped. * @param {number} a1 - Minimum value for range A. * @param {number} a2 - Maximum value for range A. * @param {number} b1 - Minimum value for range B. * @param {number} b2 - Maximum value for range B. * @return {number} The mapped value. */ function mapLinear( x, a1, a2, b1, b2 ) { return b1 + ( x - a1 ) * ( b2 - b1 ) / ( a2 - a1 ); } /** * Returns the percentage in the closed interval `[0, 1]` of the given value * between the start and end point. * * @param {number} x - The start point * @param {number} y - The end point. * @param {number} value - A value between start and end. * @return {number} The interpolation factor. */ function inverseLerp( x, y, value ) { // https://www.gamedev.net/tutorials/programming/general-and-gameplay-programming/inverse-lerp-a-super-useful-yet-often-overlooked-function-r5230/ if ( x !== y ) { return ( value - x ) / ( y - x ); } else { return 0; } } /** * Returns a value linearly interpolated from two known points based on the given interval - * `t = 0` will return `x` and `t = 1` will return `y`. * * @param {number} x - The start point * @param {number} y - The end point. * @param {number} t - The interpolation factor in the closed interval `[0, 1]`. * @return {number} The interpolated value. */ function lerp( x, y, t ) { return ( 1 - t ) * x + t * y; } /** * Smoothly interpolate a number from `x` to `y` in a spring-like manner using a delta * time to maintain frame rate independent movement. For details, see * [Frame rate independent damping using lerp]{@link http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/}. * * @param {number} x - The current point. * @param {number} y - The target point. * @param {number} lambda - A higher lambda value will make the movement more sudden, * and a lower value will make the movement more gradual. * @param {number} dt - Delta time in seconds. * @return {number} The interpolated value. */ function damp( x, y, lambda, dt ) { return lerp( x, y, 1 - Math.exp( - lambda * dt ) ); } /** * Returns a value that alternates between `0` and the given `length` parameter. * * @param {number} x - The value to pingpong. * @param {number} [length=1] - The positive value the function will pingpong to. * @return {number} The alternated value. */ function pingpong( x, length = 1 ) { // https://www.desmos.com/calculator/vcsjnyz7x4 return length - Math.abs( euclideanModulo( x, length * 2 ) - length ); } /** * Returns a value in the range `[0,1]` that represents the percentage that `x` has * moved between `min` and `max`, but smoothed or slowed down the closer `x` is to * the `min` and `max`. * * See [Smoothstep]{@link http://en.wikipedia.org/wiki/Smoothstep} for more details. * * @param {number} x - The value to evaluate based on its position between min and max. * @param {number} min - The min value. Any x value below min will be `0`. * @param {number} max - The max value. Any x value above max will be `1`. * @return {number} The alternated value. */ function smoothstep( x, min, max ) { if ( x <= min ) return 0; if ( x >= max ) return 1; x = ( x - min ) / ( max - min ); return x * x * ( 3 - 2 * x ); } /** * A [variation on smoothstep]{@link https://en.wikipedia.org/wiki/Smoothstep#Variations} * that has zero 1st and 2nd order derivatives at x=0 and x=1. * * @param {number} x - The value to evaluate based on its position between min and max. * @param {number} min - The min value. Any x value below min will be `0`. * @param {number} max - The max value. 
Any x value above max will be `1`. * @return {number} The alternated value. */ function smootherstep( x, min, max ) { if ( x <= min ) return 0; if ( x >= max ) return 1; x = ( x - min ) / ( max - min ); return x * x * x * ( x * ( x * 6 - 15 ) + 10 ); } /** * Returns a random integer from `<low, high>` interval. * * @param {number} low - The lower value boundary. * @param {number} high - The upper value boundary. * @return {number} A random integer. */ function randInt( low, high ) { return low + Math.floor( Math.random() * ( high - low + 1 ) ); } /** * Returns a random float from `<low, high>` interval. * * @param {number} low - The lower value boundary. * @param {number} high - The upper value boundary. * @return {number} A random float. */ function randFloat( low, high ) { return low + Math.random() * ( high - low ); } /** * Returns a random float from `<-range/2, range/2>` interval. * * @param {number} range - Defines the value range. * @return {number} A random float. */ function randFloatSpread( range ) { return range * ( 0.5 - Math.random() ); } /** * Returns a deterministic pseudo-random float in the interval `[0, 1]`. * * @param {number} [s] - The integer seed. * @return {number} A random float. */ function seededRandom( s ) { if ( s !== undefined ) _seed = s; // Mulberry32 generator let t = _seed += 0x6D2B79F5; t = Math.imul( t ^ t >>> 15, t | 1 ); t ^= t + Math.imul( t ^ t >>> 7, t | 61 ); return ( ( t ^ t >>> 14 ) >>> 0 ) / 4294967296; } /** * Converts degrees to radians. * * @param {number} degrees - A value in degrees. * @return {number} The converted value in radians. */ function degToRad( degrees ) { return degrees * DEG2RAD; } /** * Converts radians to degrees. * * @param {number} radians - A value in radians. * @return {number} The converted value in degrees. */ function radToDeg( radians ) { return radians * RAD2DEG; } /** * Returns `true` if the given number is a power of two. * * @param {number} value - The value to check. * @return {boolean} Whether the given number is a power of two or not. */ function isPowerOfTwo( value ) { return ( value & ( value - 1 ) ) === 0 && value !== 0; } /** * Returns the smallest power of two that is greater than or equal to the given number. * * @param {number} value - The value to find a POT for. * @return {number} The smallest power of two that is greater than or equal to the given number. */ function ceilPowerOfTwo( value ) { return Math.pow( 2, Math.ceil( Math.log( value ) / Math.LN2 ) ); } /** * Returns the largest power of two that is less than or equal to the given number. * * @param {number} value - The value to find a POT for. * @return {number} The largest power of two that is less than or equal to the given number. */ function floorPowerOfTwo( value ) { return Math.pow( 2, Math.floor( Math.log( value ) / Math.LN2 ) ); } /** * Sets the given quaternion from the [Intrinsic Proper Euler Angles]{@link https://en.wikipedia.org/wiki/Euler_angles} * defined by the given angles and order. * * Rotations are applied to the axes in the order specified by order: * rotation by angle `a` is applied first, then by angle `b`, then by angle `c`. * * @param {Quaternion} q - The quaternion to set. * @param {number} a - The rotation applied to the first axis, in radians. * @param {number} b - The rotation applied to the second axis, in radians. * @param {number} c - The rotation applied to the third axis, in radians. * @param {('XYX'|'XZX'|'YXY'|'YZY'|'ZXZ'|'ZYZ')} order - A string specifying the axes order.
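 *
 * A minimal usage sketch (the angles are placeholders, in radians):
 * ```js
 * const q = new THREE.Quaternion();
 * THREE.MathUtils.setQuaternionFromProperEuler( q, Math.PI / 2, 0, 0, 'ZXZ' );
 * ```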
*/ function setQuaternionFromProperEuler( q, a, b, c, order ) { const cos = Math.cos; const sin = Math.sin; const c2 = cos( b / 2 ); const s2 = sin( b / 2 ); const c13 = cos( ( a + c ) / 2 ); const s13 = sin( ( a + c ) / 2 ); const c1_3 = cos( ( a - c ) / 2 ); const s1_3 = sin( ( a - c ) / 2 ); const c3_1 = cos( ( c - a ) / 2 ); const s3_1 = sin( ( c - a ) / 2 ); switch ( order ) { case 'XYX': q.set( c2 * s13, s2 * c1_3, s2 * s1_3, c2 * c13 ); break; case 'YZY': q.set( s2 * s1_3, c2 * s13, s2 * c1_3, c2 * c13 ); break; case 'ZXZ': q.set( s2 * c1_3, s2 * s1_3, c2 * s13, c2 * c13 ); break; case 'XZX': q.set( c2 * s13, s2 * s3_1, s2 * c3_1, c2 * c13 ); break; case 'YXY': q.set( s2 * c3_1, c2 * s13, s2 * s3_1, c2 * c13 ); break; case 'ZYZ': q.set( s2 * s3_1, s2 * c3_1, c2 * s13, c2 * c13 ); break; default: console.warn( 'THREE.MathUtils: .setQuaternionFromProperEuler() encountered an unknown order: ' + order ); } } /** * Denormalizes the given value according to the given typed array. * * @param {number} value - The value to denormalize. * @param {TypedArray} array - The typed array that defines the data type of the value. * @return {number} The denormalize (float) value in the range `[0,1]`. */ function denormalize( value, array ) { switch ( array.constructor ) { case Float32Array: return value; case Uint32Array: return value / 4294967295.0; case Uint16Array: return value / 65535.0; case Uint8Array: return value / 255.0; case Int32Array: return Math.max( value / 2147483647.0, -1 ); case Int16Array: return Math.max( value / 32767.0, -1 ); case Int8Array: return Math.max( value / 127.0, -1 ); default: throw new Error( 'Invalid component type.' ); } } /** * Normalizes the given value according to the given typed array. * * @param {number} value - The float value in the range `[0,1]` to normalize. * @param {TypedArray} array - The typed array that defines the data type of the value. * @return {number} The normalize value. */ function normalize( value, array ) { switch ( array.constructor ) { case Float32Array: return value; case Uint32Array: return Math.round( value * 4294967295.0 ); case Uint16Array: return Math.round( value * 65535.0 ); case Uint8Array: return Math.round( value * 255.0 ); case Int32Array: return Math.round( value * 2147483647.0 ); case Int16Array: return Math.round( value * 32767.0 ); case Int8Array: return Math.round( value * 127.0 ); default: throw new Error( 'Invalid component type.' ); } } /** * @class * @classdesc A collection of math utility functions. * @hideconstructor */ const MathUtils = { DEG2RAD: DEG2RAD, RAD2DEG: RAD2DEG, /** * Generate a [UUID]{@link https://en.wikipedia.org/wiki/Universally_unique_identifier} * (universally unique identifier). * * @static * @method * @return {string} The UUID. */ generateUUID: generateUUID, /** * Clamps the given value between min and max. * * @static * @method * @param {number} value - The value to clamp. * @param {number} min - The min value. * @param {number} max - The max value. * @return {number} The clamped value. */ clamp: clamp, /** * Computes the Euclidean modulo of the given parameters that * is `( ( n % m ) + m ) % m`. * * @static * @method * @param {number} n - The first parameter. * @param {number} m - The second parameter. * @return {number} The Euclidean modulo. */ euclideanModulo: euclideanModulo, /** * Performs a linear mapping from range `` to range `` * for the given value. * * @static * @method * @param {number} x - The value to be mapped. * @param {number} a1 - Minimum value for range A. 
* @param {number} a2 - Maximum value for range A. * @param {number} b1 - Minimum value for range B. * @param {number} b2 - Maximum value for range B. * @return {number} The mapped value. */ mapLinear: mapLinear, /** * Returns the percentage in the closed interval `[0, 1]` of the given value * between the start and end point. * * @static * @method * @param {number} x - The start point * @param {number} y - The end point. * @param {number} value - A value between start and end. * @return {number} The interpolation factor. */ inverseLerp: inverseLerp, /** * Returns a value linearly interpolated from two known points based on the given interval - * `t = 0` will return `x` and `t = 1` will return `y`. * * @static * @method * @param {number} x - The start point * @param {number} y - The end point. * @param {number} t - The interpolation factor in the closed interval `[0, 1]`. * @return {number} The interpolated value. */ lerp: lerp, /** * Smoothly interpolate a number from `x` to `y` in a spring-like manner using a delta * time to maintain frame rate independent movement. For details, see * [Frame rate independent damping using lerp]{@link http://www.rorydriscoll.com/2016/03/07/frame-rate-independent-damping-using-lerp/}. * * @static * @method * @param {number} x - The current point. * @param {number} y - The target point. * @param {number} lambda - A higher lambda value will make the movement more sudden, * and a lower value will make the movement more gradual. * @param {number} dt - Delta time in seconds. * @return {number} The interpolated value. */ damp: damp, /** * Returns a value that alternates between `0` and the given `length` parameter. * * @static * @method * @param {number} x - The value to pingpong. * @param {number} [length=1] - The positive value the function will pingpong to. * @return {number} The alternated value. */ pingpong: pingpong, /** * Returns a value in the range `[0,1]` that represents the percentage that `x` has * moved between `min` and `max`, but smoothed or slowed down the closer `x` is to * the `min` and `max`. * * See [Smoothstep]{@link http://en.wikipedia.org/wiki/Smoothstep} for more details. * * @static * @method * @param {number} x - The value to evaluate based on its position between min and max. * @param {number} min - The min value. Any x value below min will be `0`. * @param {number} max - The max value. Any x value above max will be `1`. * @return {number} The alternated value. */ smoothstep: smoothstep, /** * A [variation on smoothstep]{@link https://en.wikipedia.org/wiki/Smoothstep#Variations} * that has zero 1st and 2nd order derivatives at x=0 and x=1. * * @static * @method * @param {number} x - The value to evaluate based on its position between min and max. * @param {number} min - The min value. Any x value below min will be `0`. * @param {number} max - The max value. Any x value above max will be `1`. * @return {number} The alternated value. */ smootherstep: smootherstep, /** * Returns a random integer from `<low, high>` interval. * * @static * @method * @param {number} low - The lower value boundary. * @param {number} high - The upper value boundary. * @return {number} A random integer. */ randInt: randInt, /** * Returns a random float from `<low, high>` interval. * * @static * @method * @param {number} low - The lower value boundary. * @param {number} high - The upper value boundary. * @return {number} A random float. */ randFloat: randFloat, /** * Returns a random float from `<-range/2, range/2>` interval.
* * @static * @method * @param {number} range - Defines the value range. * @return {number} A random float. */ randFloatSpread: randFloatSpread, /** * Returns a deterministic pseudo-random float in the interval `[0, 1]`. * * @static * @method * @param {number} [s] - The integer seed. * @return {number} A random float. */ seededRandom: seededRandom, /** * Converts degrees to radians. * * @static * @method * @param {number} degrees - A value in degrees. * @return {number} The converted value in radians. */ degToRad: degToRad, /** * Converts radians to degrees. * * @static * @method * @param {number} radians - A value in radians. * @return {number} The converted value in degrees. */ radToDeg: radToDeg, /** * Returns `true` if the given number is a power of two. * * @static * @method * @param {number} value - The value to check. * @return {boolean} Whether the given number is a power of two or not. */ isPowerOfTwo: isPowerOfTwo, /** * Returns the smallest power of two that is greater than or equal to the given number. * * @static * @method * @param {number} value - The value to find a POT for. * @return {number} The smallest power of two that is greater than or equal to the given number. */ ceilPowerOfTwo: ceilPowerOfTwo, /** * Returns the largest power of two that is less than or equal to the given number. * * @static * @method * @param {number} value - The value to find a POT for. * @return {number} The largest power of two that is less than or equal to the given number. */ floorPowerOfTwo: floorPowerOfTwo, /** * Sets the given quaternion from the [Intrinsic Proper Euler Angles]{@link https://en.wikipedia.org/wiki/Euler_angles} * defined by the given angles and order. * * Rotations are applied to the axes in the order specified by order: * rotation by angle `a` is applied first, then by angle `b`, then by angle `c`. * * @static * @method * @param {Quaternion} q - The quaternion to set. * @param {number} a - The rotation applied to the first axis, in radians. * @param {number} b - The rotation applied to the second axis, in radians. * @param {number} c - The rotation applied to the third axis, in radians. * @param {('XYX'|'XZX'|'YXY'|'YZY'|'ZXZ'|'ZYZ')} order - A string specifying the axes order. */ setQuaternionFromProperEuler: setQuaternionFromProperEuler, /** * Normalizes the given value according to the given typed array. * * @static * @method * @param {number} value - The float value in the range `[0,1]` to normalize. * @param {TypedArray} array - The typed array that defines the data type of the value. * @return {number} The normalize value. */ normalize: normalize, /** * Denormalizes the given value according to the given typed array. * * @static * @method * @param {number} value - The value to denormalize. * @param {TypedArray} array - The typed array that defines the data type of the value. * @return {number} The denormalize (float) value in the range `[0,1]`. */ denormalize: denormalize }; /** * Class representing a 2D vector. A 2D vector is an ordered pair of numbers * (labeled x and y), which can be used to represent a number of things, such as: * * - A point in 2D space (i.e. a position on a plane). * - A direction and length across a plane. In three.js the length will * always be the Euclidean distance(straight-line distance) from `(0, 0)` to `(x, y)` * and the direction is also measured from `(0, 0)` towards `(x, y)`. * - Any arbitrary ordered pair of numbers. 
* * There are other things a 2D vector can be used to represent, such as * momentum vectors, complex numbers and so on, however these are the most * common uses in three.js. * * Iterating through a vector instance will yield its components `(x, y)` in * the corresponding order. * ```js * const a = new THREE.Vector2( 0, 1 ); * * //no arguments; will be initialised to (0, 0) * const b = new THREE.Vector2( ); * * const d = a.distanceTo( b ); * ``` */ class Vector2 { /** * Constructs a new 2D vector. * * @param {number} [x=0] - The x value of this vector. * @param {number} [y=0] - The y value of this vector. */ constructor( x = 0, y = 0 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Vector2.prototype.isVector2 = true; /** * The x value of this vector. * * @type {number} */ this.x = x; /** * The y value of this vector. * * @type {number} */ this.y = y; } /** * Alias for {@link Vector2#x}. * * @type {number} */ get width() { return this.x; } set width( value ) { this.x = value; } /** * Alias for {@link Vector2#y}. * * @type {number} */ get height() { return this.y; } set height( value ) { this.y = value; } /** * Sets the vector components. * * @param {number} x - The value of the x component. * @param {number} y - The value of the y component. * @return {Vector2} A reference to this vector. */ set( x, y ) { this.x = x; this.y = y; return this; } /** * Sets the vector components to the same value. * * @param {number} scalar - The value to set for all vector components. * @return {Vector2} A reference to this vector. */ setScalar( scalar ) { this.x = scalar; this.y = scalar; return this; } /** * Sets the vector's x component to the given value * * @param {number} x - The value to set. * @return {Vector2} A reference to this vector. */ setX( x ) { this.x = x; return this; } /** * Sets the vector's y component to the given value * * @param {number} y - The value to set. * @return {Vector2} A reference to this vector. */ setY( y ) { this.y = y; return this; } /** * Allows to set a vector component with an index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y. * @param {number} value - The value to set. * @return {Vector2} A reference to this vector. */ setComponent( index, value ) { switch ( index ) { case 0: this.x = value; break; case 1: this.y = value; break; default: throw new Error( 'index is out of range: ' + index ); } return this; } /** * Returns the value of the vector component which matches the given index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y. * @return {number} A vector component value. */ getComponent( index ) { switch ( index ) { case 0: return this.x; case 1: return this.y; default: throw new Error( 'index is out of range: ' + index ); } } /** * Returns a new vector with copied values from this instance. * * @return {Vector2} A clone of this instance. */ clone() { return new this.constructor( this.x, this.y ); } /** * Copies the values of the given vector to this instance. * * @param {Vector2} v - The vector to copy. * @return {Vector2} A reference to this vector. */ copy( v ) { this.x = v.x; this.y = v.y; return this; } /** * Adds the given vector to this instance. * * @param {Vector2} v - The vector to add. * @return {Vector2} A reference to this vector. */ add( v ) { this.x += v.x; this.y += v.y; return this; } /** * Adds the given scalar value to all components of this instance. * * @param {number} s - The scalar to add. 
* @return {Vector2} A reference to this vector. */ addScalar( s ) { this.x += s; this.y += s; return this; } /** * Adds the given vectors and stores the result in this instance. * * @param {Vector2} a - The first vector. * @param {Vector2} b - The second vector. * @return {Vector2} A reference to this vector. */ addVectors( a, b ) { this.x = a.x + b.x; this.y = a.y + b.y; return this; } /** * Adds the given vector scaled by the given factor to this instance. * * @param {Vector2} v - The vector. * @param {number} s - The factor that scales `v`. * @return {Vector2} A reference to this vector. */ addScaledVector( v, s ) { this.x += v.x * s; this.y += v.y * s; return this; } /** * Subtracts the given vector from this instance. * * @param {Vector2} v - The vector to subtract. * @return {Vector2} A reference to this vector. */ sub( v ) { this.x -= v.x; this.y -= v.y; return this; } /** * Subtracts the given scalar value from all components of this instance. * * @param {number} s - The scalar to subtract. * @return {Vector2} A reference to this vector. */ subScalar( s ) { this.x -= s; this.y -= s; return this; } /** * Subtracts the given vectors and stores the result in this instance. * * @param {Vector2} a - The first vector. * @param {Vector2} b - The second vector. * @return {Vector2} A reference to this vector. */ subVectors( a, b ) { this.x = a.x - b.x; this.y = a.y - b.y; return this; } /** * Multiplies the given vector with this instance. * * @param {Vector2} v - The vector to multiply. * @return {Vector2} A reference to this vector. */ multiply( v ) { this.x *= v.x; this.y *= v.y; return this; } /** * Multiplies the given scalar value with all components of this instance. * * @param {number} scalar - The scalar to multiply. * @return {Vector2} A reference to this vector. */ multiplyScalar( scalar ) { this.x *= scalar; this.y *= scalar; return this; } /** * Divides this instance by the given vector. * * @param {Vector2} v - The vector to divide. * @return {Vector2} A reference to this vector. */ divide( v ) { this.x /= v.x; this.y /= v.y; return this; } /** * Divides this vector by the given scalar. * * @param {number} scalar - The scalar to divide. * @return {Vector2} A reference to this vector. */ divideScalar( scalar ) { return this.multiplyScalar( 1 / scalar ); } /** * Multiplies this vector (with an implicit 1 as the 3rd component) by * the given 3x3 matrix. * * @param {Matrix3} m - The matrix to apply. * @return {Vector2} A reference to this vector. */ applyMatrix3( m ) { const x = this.x, y = this.y; const e = m.elements; this.x = e[ 0 ] * x + e[ 3 ] * y + e[ 6 ]; this.y = e[ 1 ] * x + e[ 4 ] * y + e[ 7 ]; return this; } /** * If this vector's x or y value is greater than the given vector's x or y * value, replace that value with the corresponding min value. * * @param {Vector2} v - The vector. * @return {Vector2} A reference to this vector. */ min( v ) { this.x = Math.min( this.x, v.x ); this.y = Math.min( this.y, v.y ); return this; } /** * If this vector's x or y value is less than the given vector's x or y * value, replace that value with the corresponding max value. * * @param {Vector2} v - The vector. * @return {Vector2} A reference to this vector. */ max( v ) { this.x = Math.max( this.x, v.x ); this.y = Math.max( this.y, v.y ); return this; } /** * If this vector's x or y value is greater than the max vector's x or y * value, it is replaced by the corresponding value. 
* If this vector's x or y value is less than the min vector's x or y value, * it is replaced by the corresponding value. * * @param {Vector2} min - The minimum x and y values. * @param {Vector2} max - The maximum x and y values in the desired range. * @return {Vector2} A reference to this vector. */ clamp( min, max ) { // assumes min < max, componentwise this.x = clamp( this.x, min.x, max.x ); this.y = clamp( this.y, min.y, max.y ); return this; } /** * If this vector's x or y values are greater than the max value, they are * replaced by the max value. * If this vector's x or y values are less than the min value, they are * replaced by the min value. * * @param {number} minVal - The minimum value the components will be clamped to. * @param {number} maxVal - The maximum value the components will be clamped to. * @return {Vector2} A reference to this vector. */ clampScalar( minVal, maxVal ) { this.x = clamp( this.x, minVal, maxVal ); this.y = clamp( this.y, minVal, maxVal ); return this; } /** * If this vector's length is greater than the max value, it is replaced by * the max value. * If this vector's length is less than the min value, it is replaced by the * min value. * * @param {number} min - The minimum value the vector length will be clamped to. * @param {number} max - The maximum value the vector length will be clamped to. * @return {Vector2} A reference to this vector. */ clampLength( min, max ) { const length = this.length(); return this.divideScalar( length || 1 ).multiplyScalar( clamp( length, min, max ) ); } /** * The components of this vector are rounded down to the nearest integer value. * * @return {Vector2} A reference to this vector. */ floor() { this.x = Math.floor( this.x ); this.y = Math.floor( this.y ); return this; } /** * The components of this vector are rounded up to the nearest integer value. * * @return {Vector2} A reference to this vector. */ ceil() { this.x = Math.ceil( this.x ); this.y = Math.ceil( this.y ); return this; } /** * The components of this vector are rounded to the nearest integer value * * @return {Vector2} A reference to this vector. */ round() { this.x = Math.round( this.x ); this.y = Math.round( this.y ); return this; } /** * The components of this vector are rounded towards zero (up if negative, * down if positive) to an integer value. * * @return {Vector2} A reference to this vector. */ roundToZero() { this.x = Math.trunc( this.x ); this.y = Math.trunc( this.y ); return this; } /** * Inverts this vector - i.e. sets x = -x and y = -y. * * @return {Vector2} A reference to this vector. */ negate() { this.x = - this.x; this.y = - this.y; return this; } /** * Calculates the dot product of the given vector with this instance. * * @param {Vector2} v - The vector to compute the dot product with. * @return {number} The result of the dot product. */ dot( v ) { return this.x * v.x + this.y * v.y; } /** * Calculates the cross product of the given vector with this instance. * * @param {Vector2} v - The vector to compute the cross product with. * @return {number} The result of the cross product. */ cross( v ) { return this.x * v.y - this.y * v.x; } /** * Computes the square of the Euclidean length (straight-line length) from * (0, 0) to (x, y). If you are comparing the lengths of vectors, you should * compare the length squared instead as it is slightly more efficient to calculate. * * @return {number} The square length of this vector. 
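*
* A small sketch of the comparison pattern suggested above (the vectors are arbitrary):
* ```js
* const a = new THREE.Vector2( 3, 4 );
* const b = new THREE.Vector2( 1, 1 );
* // avoids the square roots that a.length() > b.length() would compute
* const aIsLonger = a.lengthSq() > b.lengthSq(); // true (25 > 2)
* ```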
*/ lengthSq() { return this.x * this.x + this.y * this.y; } /** * Computes the Euclidean length (straight-line length) from (0, 0) to (x, y). * * @return {number} The length of this vector. */ length() { return Math.sqrt( this.x * this.x + this.y * this.y ); } /** * Computes the Manhattan length of this vector. * * @return {number} The length of this vector. */ manhattanLength() { return Math.abs( this.x ) + Math.abs( this.y ); } /** * Converts this vector to a unit vector - that is, sets it equal to a vector * with the same direction as this one, but with a vector length of `1`. * * @return {Vector2} A reference to this vector. */ normalize() { return this.divideScalar( this.length() || 1 ); } /** * Computes the angle in radians of this vector with respect to the positive x-axis. * * @return {number} The angle in radians. */ angle() { const angle = Math.atan2( - this.y, - this.x ) + Math.PI; return angle; } /** * Returns the angle between the given vector and this instance in radians. * * @param {Vector2} v - The vector to compute the angle with. * @return {number} The angle in radians. */ angleTo( v ) { const denominator = Math.sqrt( this.lengthSq() * v.lengthSq() ); if ( denominator === 0 ) return Math.PI / 2; const theta = this.dot( v ) / denominator; // clamp, to handle numerical problems return Math.acos( clamp( theta, -1, 1 ) ); } /** * Computes the distance from the given vector to this instance. * * @param {Vector2} v - The vector to compute the distance to. * @return {number} The distance. */ distanceTo( v ) { return Math.sqrt( this.distanceToSquared( v ) ); } /** * Computes the squared distance from the given vector to this instance. * If you are just comparing the distance with another distance, you should compare * the distance squared instead as it is slightly more efficient to calculate. * * @param {Vector2} v - The vector to compute the squared distance to. * @return {number} The squared distance. */ distanceToSquared( v ) { const dx = this.x - v.x, dy = this.y - v.y; return dx * dx + dy * dy; } /** * Computes the Manhattan distance from the given vector to this instance. * * @param {Vector2} v - The vector to compute the Manhattan distance to. * @return {number} The Manhattan distance. */ manhattanDistanceTo( v ) { return Math.abs( this.x - v.x ) + Math.abs( this.y - v.y ); } /** * Sets this vector to a vector with the same direction as this one, but * with the specified length. * * @param {number} length - The new length of this vector. * @return {Vector2} A reference to this vector. */ setLength( length ) { return this.normalize().multiplyScalar( length ); } /** * Linearly interpolates between the given vector and this instance, where * alpha is the percent distance along the line - alpha = 0 will be this * vector, and alpha = 1 will be the given one. * * @param {Vector2} v - The vector to interpolate towards. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. * @return {Vector2} A reference to this vector. */ lerp( v, alpha ) { this.x += ( v.x - this.x ) * alpha; this.y += ( v.y - this.y ) * alpha; return this; } /** * Linearly interpolates between the given vectors, where alpha is the percent * distance along the line - alpha = 0 will be first vector, and alpha = 1 will * be the second one. The result is stored in this instance. * * @param {Vector2} v1 - The first vector. * @param {Vector2} v2 - The second vector. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. 
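*
* For illustration (the interpolation factors are arbitrary):
* ```js
* const start = new THREE.Vector2( 0, 0 );
* const end = new THREE.Vector2( 10, 20 );
* const mid = new THREE.Vector2().lerpVectors( start, end, 0.5 ); // (5, 10)
* start.lerp( end, 0.25 ); // start becomes (2.5, 5)
* ```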
* @return {Vector2} A reference to this vector. */ lerpVectors( v1, v2, alpha ) { this.x = v1.x + ( v2.x - v1.x ) * alpha; this.y = v1.y + ( v2.y - v1.y ) * alpha; return this; } /** * Returns `true` if this vector is equal with the given one. * * @param {Vector2} v - The vector to test for equality. * @return {boolean} Whether this vector is equal with the given one. */ equals( v ) { return ( ( v.x === this.x ) && ( v.y === this.y ) ); } /** * Sets this vector's x value to be `array[ offset ]` and y * value to be `array[ offset + 1 ]`. * * @param {Array} array - An array holding the vector component values. * @param {number} [offset=0] - The offset into the array. * @return {Vector2} A reference to this vector. */ fromArray( array, offset = 0 ) { this.x = array[ offset ]; this.y = array[ offset + 1 ]; return this; } /** * Writes the components of this vector to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the vector components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The vector components. */ toArray( array = [], offset = 0 ) { array[ offset ] = this.x; array[ offset + 1 ] = this.y; return array; } /** * Sets the components of this vector from the given buffer attribute. * * @param {BufferAttribute} attribute - The buffer attribute holding vector data. * @param {number} index - The index into the attribute. * @return {Vector2} A reference to this vector. */ fromBufferAttribute( attribute, index ) { this.x = attribute.getX( index ); this.y = attribute.getY( index ); return this; } /** * Rotates this vector around the given center by the given angle. * * @param {Vector2} center - The point around which to rotate. * @param {number} angle - The angle to rotate, in radians. * @return {Vector2} A reference to this vector. */ rotateAround( center, angle ) { const c = Math.cos( angle ), s = Math.sin( angle ); const x = this.x - center.x; const y = this.y - center.y; this.x = x * c - y * s + center.x; this.y = x * s + y * c + center.y; return this; } /** * Sets each component of this vector to a pseudo-random value between `0` and * `1`, excluding `1`. * * @return {Vector2} A reference to this vector. */ random() { this.x = Math.random(); this.y = Math.random(); return this; } *[ Symbol.iterator ]() { yield this.x; yield this.y; } } /** * Represents a 3x3 matrix. * * A Note on Row-Major and Column-Major Ordering: * * The constructor and {@link Matrix3#set} method take arguments in * [row-major]{@link https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order} * order, while internally they are stored in the {@link Matrix3#elements} array in column-major order. * This means that calling: * ```js * const m = new THREE.Matrix3(); * m.set( 11, 12, 13, * 21, 22, 23, * 31, 32, 33 ); * ``` * will result in the elements array containing: * ```js * m.elements = [ 11, 21, 31, * 12, 22, 32, * 13, 23, 33 ]; * ``` * and internally all calculations are performed using column-major ordering. * However, as the actual ordering makes no difference mathematically and * most people are used to thinking about matrices in row-major order, the * three.js documentation shows matrices in row-major order. Just bear in * mind that if you are reading the source code, you'll have to take the * transpose of any matrices outlined here to make sense of the calculations. */ class Matrix3 { /** * Constructs a new 3x3 matrix.
The arguments are supposed to be * in row-major order. If no arguments are provided, the constructor * initializes the matrix as an identity matrix. * * @param {number} [n11] - 1-1 matrix element. * @param {number} [n12] - 1-2 matrix element. * @param {number} [n13] - 1-3 matrix element. * @param {number} [n21] - 2-1 matrix element. * @param {number} [n22] - 2-2 matrix element. * @param {number} [n23] - 2-3 matrix element. * @param {number} [n31] - 3-1 matrix element. * @param {number} [n32] - 3-2 matrix element. * @param {number} [n33] - 3-3 matrix element. */ constructor( n11, n12, n13, n21, n22, n23, n31, n32, n33 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Matrix3.prototype.isMatrix3 = true; /** * A column-major list of matrix values. * * @type {Array} */ this.elements = [ 1, 0, 0, 0, 1, 0, 0, 0, 1 ]; if ( n11 !== undefined ) { this.set( n11, n12, n13, n21, n22, n23, n31, n32, n33 ); } } /** * Sets the elements of the matrix.The arguments are supposed to be * in row-major order. * * @param {number} [n11] - 1-1 matrix element. * @param {number} [n12] - 1-2 matrix element. * @param {number} [n13] - 1-3 matrix element. * @param {number} [n21] - 2-1 matrix element. * @param {number} [n22] - 2-2 matrix element. * @param {number} [n23] - 2-3 matrix element. * @param {number} [n31] - 3-1 matrix element. * @param {number} [n32] - 3-2 matrix element. * @param {number} [n33] - 3-3 matrix element. * @return {Matrix3} A reference to this matrix. */ set( n11, n12, n13, n21, n22, n23, n31, n32, n33 ) { const te = this.elements; te[ 0 ] = n11; te[ 1 ] = n21; te[ 2 ] = n31; te[ 3 ] = n12; te[ 4 ] = n22; te[ 5 ] = n32; te[ 6 ] = n13; te[ 7 ] = n23; te[ 8 ] = n33; return this; } /** * Sets this matrix to the 3x3 identity matrix. * * @return {Matrix3} A reference to this matrix. */ identity() { this.set( 1, 0, 0, 0, 1, 0, 0, 0, 1 ); return this; } /** * Copies the values of the given matrix to this instance. * * @param {Matrix3} m - The matrix to copy. * @return {Matrix3} A reference to this matrix. */ copy( m ) { const te = this.elements; const me = m.elements; te[ 0 ] = me[ 0 ]; te[ 1 ] = me[ 1 ]; te[ 2 ] = me[ 2 ]; te[ 3 ] = me[ 3 ]; te[ 4 ] = me[ 4 ]; te[ 5 ] = me[ 5 ]; te[ 6 ] = me[ 6 ]; te[ 7 ] = me[ 7 ]; te[ 8 ] = me[ 8 ]; return this; } /** * Extracts the basis of this matrix into the three axis vectors provided. * * @param {Vector3} xAxis - The basis's x axis. * @param {Vector3} yAxis - The basis's y axis. * @param {Vector3} zAxis - The basis's z axis. * @return {Matrix3} A reference to this matrix. */ extractBasis( xAxis, yAxis, zAxis ) { xAxis.setFromMatrix3Column( this, 0 ); yAxis.setFromMatrix3Column( this, 1 ); zAxis.setFromMatrix3Column( this, 2 ); return this; } /** * Set this matrix to the upper 3x3 matrix of the given 4x4 matrix. * * @param {Matrix4} m - The 4x4 matrix. * @return {Matrix3} A reference to this matrix. */ setFromMatrix4( m ) { const me = m.elements; this.set( me[ 0 ], me[ 4 ], me[ 8 ], me[ 1 ], me[ 5 ], me[ 9 ], me[ 2 ], me[ 6 ], me[ 10 ] ); return this; } /** * Post-multiplies this matrix by the given 3x3 matrix. * * @param {Matrix3} m - The matrix to multiply with. * @return {Matrix3} A reference to this matrix. */ multiply( m ) { return this.multiplyMatrices( this, m ); } /** * Pre-multiplies this matrix by the given 3x3 matrix. * * @param {Matrix3} m - The matrix to multiply with. * @return {Matrix3} A reference to this matrix. 
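*
* A minimal sketch of how the two multiplication orders differ (the matrices are placeholders):
* ```js
* // a.multiply( b ) computes a * b, a.premultiply( b ) computes b * a
* const a = new THREE.Matrix3().makeRotation( Math.PI / 2 );
* const b = new THREE.Matrix3().makeScale( 2, 2 );
* a.premultiply( b ); // same result as new THREE.Matrix3().multiplyMatrices( b, a )
* ```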
*/ premultiply( m ) { return this.multiplyMatrices( m, this ); } /** * Multiples the given 3x3 matrices and stores the result * in this matrix. * * @param {Matrix3} a - The first matrix. * @param {Matrix3} b - The second matrix. * @return {Matrix3} A reference to this matrix. */ multiplyMatrices( a, b ) { const ae = a.elements; const be = b.elements; const te = this.elements; const a11 = ae[ 0 ], a12 = ae[ 3 ], a13 = ae[ 6 ]; const a21 = ae[ 1 ], a22 = ae[ 4 ], a23 = ae[ 7 ]; const a31 = ae[ 2 ], a32 = ae[ 5 ], a33 = ae[ 8 ]; const b11 = be[ 0 ], b12 = be[ 3 ], b13 = be[ 6 ]; const b21 = be[ 1 ], b22 = be[ 4 ], b23 = be[ 7 ]; const b31 = be[ 2 ], b32 = be[ 5 ], b33 = be[ 8 ]; te[ 0 ] = a11 * b11 + a12 * b21 + a13 * b31; te[ 3 ] = a11 * b12 + a12 * b22 + a13 * b32; te[ 6 ] = a11 * b13 + a12 * b23 + a13 * b33; te[ 1 ] = a21 * b11 + a22 * b21 + a23 * b31; te[ 4 ] = a21 * b12 + a22 * b22 + a23 * b32; te[ 7 ] = a21 * b13 + a22 * b23 + a23 * b33; te[ 2 ] = a31 * b11 + a32 * b21 + a33 * b31; te[ 5 ] = a31 * b12 + a32 * b22 + a33 * b32; te[ 8 ] = a31 * b13 + a32 * b23 + a33 * b33; return this; } /** * Multiplies every component of the matrix by the given scalar. * * @param {number} s - The scalar. * @return {Matrix3} A reference to this matrix. */ multiplyScalar( s ) { const te = this.elements; te[ 0 ] *= s; te[ 3 ] *= s; te[ 6 ] *= s; te[ 1 ] *= s; te[ 4 ] *= s; te[ 7 ] *= s; te[ 2 ] *= s; te[ 5 ] *= s; te[ 8 ] *= s; return this; } /** * Computes and returns the determinant of this matrix. * * @return {number} The determinant. */ determinant() { const te = this.elements; const a = te[ 0 ], b = te[ 1 ], c = te[ 2 ], d = te[ 3 ], e = te[ 4 ], f = te[ 5 ], g = te[ 6 ], h = te[ 7 ], i = te[ 8 ]; return a * e * i - a * f * h - b * d * i + b * f * g + c * d * h - c * e * g; } /** * Inverts this matrix, using the [analytic method]{@link https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution}. * You can not invert with a determinant of zero. If you attempt this, the method produces * a zero matrix instead. * * @return {Matrix3} A reference to this matrix. */ invert() { const te = this.elements, n11 = te[ 0 ], n21 = te[ 1 ], n31 = te[ 2 ], n12 = te[ 3 ], n22 = te[ 4 ], n32 = te[ 5 ], n13 = te[ 6 ], n23 = te[ 7 ], n33 = te[ 8 ], t11 = n33 * n22 - n32 * n23, t12 = n32 * n13 - n33 * n12, t13 = n23 * n12 - n22 * n13, det = n11 * t11 + n21 * t12 + n31 * t13; if ( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0 ); const detInv = 1 / det; te[ 0 ] = t11 * detInv; te[ 1 ] = ( n31 * n23 - n33 * n21 ) * detInv; te[ 2 ] = ( n32 * n21 - n31 * n22 ) * detInv; te[ 3 ] = t12 * detInv; te[ 4 ] = ( n33 * n11 - n31 * n13 ) * detInv; te[ 5 ] = ( n31 * n12 - n32 * n11 ) * detInv; te[ 6 ] = t13 * detInv; te[ 7 ] = ( n21 * n13 - n23 * n11 ) * detInv; te[ 8 ] = ( n22 * n11 - n21 * n12 ) * detInv; return this; } /** * Transposes this matrix in place. * * @return {Matrix3} A reference to this matrix. */ transpose() { let tmp; const m = this.elements; tmp = m[ 1 ]; m[ 1 ] = m[ 3 ]; m[ 3 ] = tmp; tmp = m[ 2 ]; m[ 2 ] = m[ 6 ]; m[ 6 ] = tmp; tmp = m[ 5 ]; m[ 5 ] = m[ 7 ]; m[ 7 ] = tmp; return this; } /** * Computes the normal matrix which is the inverse transpose of the upper * left 3x3 portion of the given 4x4 matrix. * * @param {Matrix4} matrix4 - The 4x4 matrix. * @return {Matrix3} A reference to this matrix. */ getNormalMatrix( matrix4 ) { return this.setFromMatrix4( matrix4 ).invert().transpose(); } /** * Transposes this matrix into the supplied array, and returns itself unchanged. 
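*
* For example (element values chosen only to make the reordering visible):
* ```js
* const m = new THREE.Matrix3().set( 1, 2, 3, 4, 5, 6, 7, 8, 9 );
* const target = [];
* m.transposeIntoArray( target );
* // target is [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ], the elements in row-major order, while m is unchanged
* ```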
* * @param {Array} r - An array to store the transposed matrix elements. * @return {Matrix3} A reference to this matrix. */ transposeIntoArray( r ) { const m = this.elements; r[ 0 ] = m[ 0 ]; r[ 1 ] = m[ 3 ]; r[ 2 ] = m[ 6 ]; r[ 3 ] = m[ 1 ]; r[ 4 ] = m[ 4 ]; r[ 5 ] = m[ 7 ]; r[ 6 ] = m[ 2 ]; r[ 7 ] = m[ 5 ]; r[ 8 ] = m[ 8 ]; return this; } /** * Sets the UV transform matrix from offset, repeat, rotation, and center. * * @param {number} tx - Offset x. * @param {number} ty - Offset y. * @param {number} sx - Repeat x. * @param {number} sy - Repeat y. * @param {number} rotation - Rotation, in radians. Positive values rotate counterclockwise. * @param {number} cx - Center x of rotation. * @param {number} cy - Center y of rotation * @return {Matrix3} A reference to this matrix. */ setUvTransform( tx, ty, sx, sy, rotation, cx, cy ) { const c = Math.cos( rotation ); const s = Math.sin( rotation ); this.set( sx * c, sx * s, - sx * ( c * cx + s * cy ) + cx + tx, - sy * s, sy * c, - sy * ( - s * cx + c * cy ) + cy + ty, 0, 0, 1 ); return this; } /** * Scales this matrix with the given scalar values. * * @param {number} sx - The amount to scale in the X axis. * @param {number} sy - The amount to scale in the Y axis. * @return {Matrix3} A reference to this matrix. */ scale( sx, sy ) { this.premultiply( _m3.makeScale( sx, sy ) ); return this; } /** * Rotates this matrix by the given angle. * * @param {number} theta - The rotation in radians. * @return {Matrix3} A reference to this matrix. */ rotate( theta ) { this.premultiply( _m3.makeRotation( - theta ) ); return this; } /** * Translates this matrix by the given scalar values. * * @param {number} tx - The amount to translate in the X axis. * @param {number} ty - The amount to translate in the Y axis. * @return {Matrix3} A reference to this matrix. */ translate( tx, ty ) { this.premultiply( _m3.makeTranslation( tx, ty ) ); return this; } // for 2D Transforms /** * Sets this matrix as a 2D translation transform. * * @param {number|Vector2} x - The amount to translate in the X axis or alternatively a translation vector. * @param {number} y - The amount to translate in the Y axis. * @return {Matrix3} A reference to this matrix. */ makeTranslation( x, y ) { if ( x.isVector2 ) { this.set( 1, 0, x.x, 0, 1, x.y, 0, 0, 1 ); } else { this.set( 1, 0, x, 0, 1, y, 0, 0, 1 ); } return this; } /** * Sets this matrix as a 2D rotational transformation. * * @param {number} theta - The rotation in radians. * @return {Matrix3} A reference to this matrix. */ makeRotation( theta ) { // counterclockwise const c = Math.cos( theta ); const s = Math.sin( theta ); this.set( c, - s, 0, s, c, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a 2D scale transform. * * @param {number} x - The amount to scale in the X axis. * @param {number} y - The amount to scale in the Y axis. * @return {Matrix3} A reference to this matrix. */ makeScale( x, y ) { this.set( x, 0, 0, 0, y, 0, 0, 0, 1 ); return this; } /** * Returns `true` if this matrix is equal with the given one. * * @param {Matrix3} matrix - The matrix to test for equality. * @return {boolean} Whether this matrix is equal with the given one. */ equals( matrix ) { const te = this.elements; const me = matrix.elements; for ( let i = 0; i < 9; i ++ ) { if ( te[ i ] !== me[ i ] ) return false; } return true; } /** * Sets the elements of the matrix from the given array. * * @param {Array} array - The matrix elements in column-major order. * @param {number} [offset=0] - Index of the first element in the array. 
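*
* For illustration, fromArray() and set() relate as follows (column-major vs. row-major input):
* ```js
* const m = new THREE.Matrix3().fromArray( [ 1, 4, 7, 2, 5, 8, 3, 6, 9 ] );
* // equivalent to m.set( 1, 2, 3, 4, 5, 6, 7, 8, 9 ), which takes row-major arguments
* m.toArray(); // [ 1, 4, 7, 2, 5, 8, 3, 6, 9 ]
* ```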
* @return {Matrix3} A reference to this matrix. */ fromArray( array, offset = 0 ) { for ( let i = 0; i < 9; i ++ ) { this.elements[ i ] = array[ i + offset ]; } return this; } /** * Writes the elements of this matrix to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the matrix elements in column-major order. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The matrix elements in column-major order. */ toArray( array = [], offset = 0 ) { const te = this.elements; array[ offset ] = te[ 0 ]; array[ offset + 1 ] = te[ 1 ]; array[ offset + 2 ] = te[ 2 ]; array[ offset + 3 ] = te[ 3 ]; array[ offset + 4 ] = te[ 4 ]; array[ offset + 5 ] = te[ 5 ]; array[ offset + 6 ] = te[ 6 ]; array[ offset + 7 ] = te[ 7 ]; array[ offset + 8 ] = te[ 8 ]; return array; } /** * Returns a matrix with copied values from this instance. * * @return {Matrix3} A clone of this instance. */ clone() { return new this.constructor().fromArray( this.elements ); } } const _m3 = /*@__PURE__*/ new Matrix3(); function arrayNeedsUint32( array ) { // assumes larger values usually on last for ( let i = array.length - 1; i >= 0; -- i ) { if ( array[ i ] >= 65535 ) return true; // account for PRIMITIVE_RESTART_FIXED_INDEX, #24565 } return false; } const TYPED_ARRAYS = { Int8Array: Int8Array, Uint8Array: Uint8Array, Uint8ClampedArray: Uint8ClampedArray, Int16Array: Int16Array, Uint16Array: Uint16Array, Int32Array: Int32Array, Uint32Array: Uint32Array, Float32Array: Float32Array, Float64Array: Float64Array }; function getTypedArray( type, buffer ) { return new TYPED_ARRAYS[ type ]( buffer ); } function createElementNS( name ) { return document.createElementNS( 'http://www.w3.org/1999/xhtml', name ); } function createCanvasElement() { const canvas = createElementNS( 'canvas' ); canvas.style.display = 'block'; return canvas; } const _cache = {}; function warnOnce( message ) { if ( message in _cache ) return; _cache[ message ] = true; console.warn( message ); } function probeAsync( gl, sync, interval ) { return new Promise( function ( resolve, reject ) { function probe() { switch ( gl.clientWaitSync( sync, gl.SYNC_FLUSH_COMMANDS_BIT, 0 ) ) { case gl.WAIT_FAILED: reject(); break; case gl.TIMEOUT_EXPIRED: setTimeout( probe, interval ); break; default: resolve(); } } setTimeout( probe, interval ); } ); } function toNormalizedProjectionMatrix( projectionMatrix ) { const m = projectionMatrix.elements; // Convert [-1, 1] to [0, 1] projection matrix m[ 2 ] = 0.5 * m[ 2 ] + 0.5 * m[ 3 ]; m[ 6 ] = 0.5 * m[ 6 ] + 0.5 * m[ 7 ]; m[ 10 ] = 0.5 * m[ 10 ] + 0.5 * m[ 11 ]; m[ 14 ] = 0.5 * m[ 14 ] + 0.5 * m[ 15 ]; } function toReversedProjectionMatrix( projectionMatrix ) { const m = projectionMatrix.elements; const isPerspectiveMatrix = m[ 11 ] === -1; // Reverse [0, 1] projection matrix if ( isPerspectiveMatrix ) { m[ 10 ] = - m[ 10 ] - 1; m[ 14 ] = - m[ 14 ]; } else { m[ 10 ] = - m[ 10 ]; m[ 14 ] = - m[ 14 ] + 1; } } const LINEAR_REC709_TO_XYZ = /*@__PURE__*/ new Matrix3().set( 0.4123908, 0.3575843, 0.1804808, 0.2126390, 0.7151687, 0.0721923, 0.0193308, 0.1191948, 0.9505322 ); const XYZ_TO_LINEAR_REC709 = /*@__PURE__*/ new Matrix3().set( 3.2409699, -1.5373832, -0.4986108, -0.9692436, 1.8759675, 0.0415551, 0.0556301, -0.203977, 1.0569715 ); function createColorManagement() { const ColorManagement = { enabled: true, workingColorSpace: LinearSRGBColorSpace, /** * Implementations of supported color spaces. 
* * Required: * - primaries: chromaticity coordinates [ rx ry gx gy bx by ] * - whitePoint: reference white [ x y ] * - transfer: transfer function (pre-defined) * - toXYZ: Matrix3 RGB to XYZ transform * - fromXYZ: Matrix3 XYZ to RGB transform * - luminanceCoefficients: RGB luminance coefficients * * Optional: * - outputColorSpaceConfig: { drawingBufferColorSpace: ColorSpace } * - workingColorSpaceConfig: { unpackColorSpace: ColorSpace } * * Reference: * - https://www.russellcottrell.com/photo/matrixCalculator.htm */ spaces: {}, convert: function ( color, sourceColorSpace, targetColorSpace ) { if ( this.enabled === false || sourceColorSpace === targetColorSpace || ! sourceColorSpace || ! targetColorSpace ) { return color; } if ( this.spaces[ sourceColorSpace ].transfer === SRGBTransfer ) { color.r = SRGBToLinear( color.r ); color.g = SRGBToLinear( color.g ); color.b = SRGBToLinear( color.b ); } if ( this.spaces[ sourceColorSpace ].primaries !== this.spaces[ targetColorSpace ].primaries ) { color.applyMatrix3( this.spaces[ sourceColorSpace ].toXYZ ); color.applyMatrix3( this.spaces[ targetColorSpace ].fromXYZ ); } if ( this.spaces[ targetColorSpace ].transfer === SRGBTransfer ) { color.r = LinearToSRGB( color.r ); color.g = LinearToSRGB( color.g ); color.b = LinearToSRGB( color.b ); } return color; }, fromWorkingColorSpace: function ( color, targetColorSpace ) { return this.convert( color, this.workingColorSpace, targetColorSpace ); }, toWorkingColorSpace: function ( color, sourceColorSpace ) { return this.convert( color, sourceColorSpace, this.workingColorSpace ); }, getPrimaries: function ( colorSpace ) { return this.spaces[ colorSpace ].primaries; }, getTransfer: function ( colorSpace ) { if ( colorSpace === NoColorSpace ) return LinearTransfer; return this.spaces[ colorSpace ].transfer; }, getLuminanceCoefficients: function ( target, colorSpace = this.workingColorSpace ) { return target.fromArray( this.spaces[ colorSpace ].luminanceCoefficients ); }, define: function ( colorSpaces ) { Object.assign( this.spaces, colorSpaces ); }, // Internal APIs _getMatrix: function ( targetMatrix, sourceColorSpace, targetColorSpace ) { return targetMatrix .copy( this.spaces[ sourceColorSpace ].toXYZ ) .multiply( this.spaces[ targetColorSpace ].fromXYZ ); }, _getDrawingBufferColorSpace: function ( colorSpace ) { return this.spaces[ colorSpace ].outputColorSpaceConfig.drawingBufferColorSpace; }, _getUnpackColorSpace: function ( colorSpace = this.workingColorSpace ) { return this.spaces[ colorSpace ].workingColorSpaceConfig.unpackColorSpace; } }; /****************************************************************************** * sRGB definitions */ const REC709_PRIMARIES = [ 0.640, 0.330, 0.300, 0.600, 0.150, 0.060 ]; const REC709_LUMINANCE_COEFFICIENTS = [ 0.2126, 0.7152, 0.0722 ]; const D65 = [ 0.3127, 0.3290 ]; ColorManagement.define( { [ LinearSRGBColorSpace ]: { primaries: REC709_PRIMARIES, whitePoint: D65, transfer: LinearTransfer, toXYZ: LINEAR_REC709_TO_XYZ, fromXYZ: XYZ_TO_LINEAR_REC709, luminanceCoefficients: REC709_LUMINANCE_COEFFICIENTS, workingColorSpaceConfig: { unpackColorSpace: SRGBColorSpace }, outputColorSpaceConfig: { drawingBufferColorSpace: SRGBColorSpace } }, [ SRGBColorSpace ]: { primaries: REC709_PRIMARIES, whitePoint: D65, transfer: SRGBTransfer, toXYZ: LINEAR_REC709_TO_XYZ, fromXYZ: XYZ_TO_LINEAR_REC709, luminanceCoefficients: REC709_LUMINANCE_COEFFICIENTS, outputColorSpaceConfig: { drawingBufferColorSpace: SRGBColorSpace } }, } ); return ColorManagement; } const ColorManagement = 
/*@__PURE__*/ createColorManagement(); function SRGBToLinear( c ) { return ( c < 0.04045 ) ? c * 0.0773993808 : Math.pow( c * 0.9478672986 + 0.0521327014, 2.4 ); } function LinearToSRGB( c ) { return ( c < 0.0031308 ) ? c * 12.92 : 1.055 * ( Math.pow( c, 0.41666 ) ) - 0.055; } let _canvas; /** * A class containing utility functions for images. * * @hideconstructor */ class ImageUtils { /** * Returns a data URI containing a representation of the given image. * * @param {(HTMLImageElement|HTMLCanvasElement)} image - The image object. * @param {string} [type='image/png'] - Indicates the image format. * @return {string} The data URI. */ static getDataURL( image, type = 'image/png' ) { if ( /^data:/i.test( image.src ) ) { return image.src; } if ( typeof HTMLCanvasElement === 'undefined' ) { return image.src; } let canvas; if ( image instanceof HTMLCanvasElement ) { canvas = image; } else { if ( _canvas === undefined ) _canvas = createElementNS( 'canvas' ); _canvas.width = image.width; _canvas.height = image.height; const context = _canvas.getContext( '2d' ); if ( image instanceof ImageData ) { context.putImageData( image, 0, 0 ); } else { context.drawImage( image, 0, 0, image.width, image.height ); } canvas = _canvas; } return canvas.toDataURL( type ); } /** * Converts the given sRGB image data to linear color space. * * @param {(HTMLImageElement|HTMLCanvasElement|ImageBitmap|Object)} image - The image object. * @return {HTMLCanvasElement|Object} The converted image. */ static sRGBToLinear( image ) { if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) || ( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) || ( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) { const canvas = createElementNS( 'canvas' ); canvas.width = image.width; canvas.height = image.height; const context = canvas.getContext( '2d' ); context.drawImage( image, 0, 0, image.width, image.height ); const imageData = context.getImageData( 0, 0, image.width, image.height ); const data = imageData.data; for ( let i = 0; i < data.length; i ++ ) { data[ i ] = SRGBToLinear( data[ i ] / 255 ) * 255; } context.putImageData( imageData, 0, 0 ); return canvas; } else if ( image.data ) { const data = image.data.slice( 0 ); for ( let i = 0; i < data.length; i ++ ) { if ( data instanceof Uint8Array || data instanceof Uint8ClampedArray ) { data[ i ] = Math.floor( SRGBToLinear( data[ i ] / 255 ) * 255 ); } else { // assuming float data[ i ] = SRGBToLinear( data[ i ] ); } } return { data: data, width: image.width, height: image.height }; } else { console.warn( 'THREE.ImageUtils.sRGBToLinear(): Unsupported image type. No color space conversion applied.' ); return image; } } } let _sourceId = 0; /** * Represents the data source of a texture. * * The main purpose of this class is to decouple the data definition from the texture * definition so the same data can be used with multiple texture instances. */ class Source { /** * Constructs a new source. * * @param {any} [data=null] - The data definition of a texture. */ constructor( data = null ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSource = true; /** * The ID of the source. * * @name Source#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _sourceId ++ } ); /** * The UUID of the source. * * @type {string} * @readonly */ this.uuid = generateUUID(); /** * The data definition of a texture.
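*
* Because the pixel data lives on the source, several textures can point at the same data.
* A brief sketch (`image` stands in for whatever image or data object you already have):
* ```js
* const source = new THREE.Source( image );
* const textureA = new THREE.Texture();
* const textureB = new THREE.Texture();
* textureA.source = source;
* textureB.source = source; // both textures now sample the same underlying data
* ```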
* * @type {any} */ this.data = data; /** * This property is only relevant when {@link Source#needsUpdate} is set to `true` and * provides more control on how texture data should be processed. When `dataReady` is set * to `false`, the engine performs the memory allocation (if necessary) but does not transfer * the data into the GPU memory. * * @type {boolean} * @default true */ this.dataReady = true; /** * This starts at `0` and counts how many times {@link Source#needsUpdate} is set to `true`. * * @type {number} * @readonly * @default 0 */ this.version = 0; } /** * When the property is set to `true`, the engine allocates the memory * for the texture (if necessary) and triggers the actual texture upload * to the GPU next time the source is used. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) this.version ++; } /** * Serializes the source into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized source. * @see {@link ObjectLoader#parse} */ toJSON( meta ) { const isRootObject = ( meta === undefined || typeof meta === 'string' ); if ( ! isRootObject && meta.images[ this.uuid ] !== undefined ) { return meta.images[ this.uuid ]; } const output = { uuid: this.uuid, url: '' }; const data = this.data; if ( data !== null ) { let url; if ( Array.isArray( data ) ) { // cube texture url = []; for ( let i = 0, l = data.length; i < l; i ++ ) { if ( data[ i ].isDataTexture ) { url.push( serializeImage( data[ i ].image ) ); } else { url.push( serializeImage( data[ i ] ) ); } } } else { // texture url = serializeImage( data ); } output.url = url; } if ( ! isRootObject ) { meta.images[ this.uuid ] = output; } return output; } } function serializeImage( image ) { if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) || ( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) || ( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) ) { // default images return ImageUtils.getDataURL( image ); } else { if ( image.data ) { // images of DataTexture return { data: Array.from( image.data ), width: image.width, height: image.height, type: image.data.constructor.name }; } else { console.warn( 'THREE.Texture: Unable to serialize Texture.' ); return {}; } } } let _textureId = 0; /** * Base class for all textures. * * Note: After the initial use of a texture, its dimensions, format, and type * cannot be changed. Instead, call {@link Texture#dispose} on the texture and instantiate a new one. * * @augments EventDispatcher */ class Texture extends EventDispatcher { /** * Constructs a new texture. * * @param {?Object} [image=Texture.DEFAULT_IMAGE] - The image holding the texture data. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. * @param {string} [colorSpace=NoColorSpace] - The color space. 
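*
* A minimal usage sketch (assumes `image` is an already loaded image element; texture
* loaders normally construct and configure textures for you):
* ```js
* const texture = new THREE.Texture( image );
* texture.colorSpace = THREE.SRGBColorSpace;
* texture.needsUpdate = true; // upload the data the next time the texture is used
* ```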
*/ constructor( image = Texture.DEFAULT_IMAGE, mapping = Texture.DEFAULT_MAPPING, wrapS = ClampToEdgeWrapping, wrapT = ClampToEdgeWrapping, magFilter = LinearFilter, minFilter = LinearMipmapLinearFilter, format = RGBAFormat, type = UnsignedByteType, anisotropy = Texture.DEFAULT_ANISOTROPY, colorSpace = NoColorSpace ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isTexture = true; /** * The ID of the texture. * * @name Texture#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _textureId ++ } ); /** * The UUID of the texture. * * @type {string} * @readonly */ this.uuid = generateUUID(); /** * The name of the texture. * * @type {string} */ this.name = ''; /** * The data definition of a texture. A reference to the data source can be * shared across textures. This is often useful in context of spritesheets * where multiple textures render the same data but with different texture * transformations. * * @type {Source} */ this.source = new Source( image ); /** * An array holding user-defined mipmaps. * * @type {Array} */ this.mipmaps = []; /** * How the texture is applied to the object. The value `UVMapping` * is the default, where texture or uv coordinates are used to apply the map. * * @type {(UVMapping|CubeReflectionMapping|CubeRefractionMapping|EquirectangularReflectionMapping|EquirectangularRefractionMapping|CubeUVReflectionMapping)} * @default UVMapping */ this.mapping = mapping; /** * Lets you select the uv attribute to map the texture to. `0` for `uv`, * `1` for `uv1`, `2` for `uv2` and `3` for `uv3`. * * @type {number} * @default 0 */ this.channel = 0; /** * This defines how the texture is wrapped horizontally and corresponds to * *U* in UV mapping. * * @type {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} * @default ClampToEdgeWrapping */ this.wrapS = wrapS; /** * This defines how the texture is wrapped vertically and corresponds to * *V* in UV mapping. * * @type {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} * @default ClampToEdgeWrapping */ this.wrapT = wrapT; /** * How the texture is sampled when a texel covers more than one pixel. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default LinearFilter */ this.magFilter = magFilter; /** * How the texture is sampled when a texel covers less than one pixel. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default LinearMipmapLinearFilter */ this.minFilter = minFilter; /** * The number of samples taken along the axis through the pixel that has the * highest density of texels. By default, this value is `1`. A higher value * gives a less blurry result than a basic mipmap, at the cost of more * texture samples being used. * * @type {number} * @default 1 */ this.anisotropy = anisotropy; /** * The format of the texture. * * @type {number} * @default RGBAFormat */ this.format = format; /** * The default internal format is derived from {@link Texture#format} and {@link Texture#type} and * defines how the texture data is going to be stored on the GPU. * * This property allows overwriting the default format. * * @type {?string} * @default null */ this.internalFormat = null; /** * The data type of the texture.
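*
* As the class description above notes, the type (like the format) cannot be changed after
* the texture has been used. The documented pattern is to dispose of the texture and create
* a new one (sketch only; `image` and `FloatType` stand in for whatever data and type you need):
* ```js
* oldTexture.dispose();
* const newTexture = new THREE.Texture( image );
* newTexture.type = THREE.FloatType;
* newTexture.needsUpdate = true;
* ```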
* * @type {number} * @default UnsignedByteType */ this.type = type; /** * How much a single repetition of the texture is offset from the beginning, * in each direction U and V. Typical range is `0.0` to `1.0`. * * @type {Vector2} * @default (0,0) */ this.offset = new Vector2( 0, 0 ); /** * How many times the texture is repeated across the surface, in each * direction U and V. If repeat is set greater than `1` in either direction, * the corresponding wrap parameter should also be set to `RepeatWrapping` * or `MirroredRepeatWrapping` to achieve the desired tiling effect. * * @type {Vector2} * @default (1,1) */ this.repeat = new Vector2( 1, 1 ); /** * The point around which rotation occurs. A value of `(0.5, 0.5)` corresponds * to the center of the texture. Default is `(0, 0)`, the lower left. * * @type {Vector2} * @default (0,0) */ this.center = new Vector2( 0, 0 ); /** * How much the texture is rotated around the center point, in radians. * Positive values are counter-clockwise. * * @type {number} * @default 0 */ this.rotation = 0; /** * Whether to update the texture's uv-transformation {@link Texture#matrix} * from the properties {@link Texture#offset}, {@link Texture#repeat}, * {@link Texture#rotation}, and {@link Texture#center}. * * Set this to `false` if you are specifying the uv-transform matrix directly. * * @type {boolean} * @default true */ this.matrixAutoUpdate = true; /** * The uv-transformation matrix of the texture. * * @type {Matrix3} */ this.matrix = new Matrix3(); /** * Whether to generate mipmaps (if possible) for a texture. * * Set this to `false` if you are creating mipmaps manually. * * @type {boolean} * @default true */ this.generateMipmaps = true; /** * If set to `true`, the alpha channel, if present, is multiplied into the * color channels when the texture is uploaded to the GPU. * * Note that this property has no effect when using `ImageBitmap`. You need to * configure premultiply alpha on bitmap creation instead. * * @type {boolean} * @default false */ this.premultiplyAlpha = false; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Note that this property has no effect when using `ImageBitmap`. You need to * configure the flip on bitmap creation instead. * * @type {boolean} * @default true */ this.flipY = true; /** * Specifies the alignment requirements for the start of each pixel row in memory. * The allowable values are `1` (byte-alignment), `2` (rows aligned to even-numbered bytes), * `4` (word-alignment), and `8` (rows start on double-word boundaries). * * @type {number} * @default 4 */ this.unpackAlignment = 4; // valid values: 1, 2, 4, 8 (see http://www.khronos.org/opengles/sdk/docs/man/xhtml/glPixelStorei.xml) /** * Textures containing color data should be annotated with `SRGBColorSpace` or `LinearSRGBColorSpace`. * * @type {string} * @default NoColorSpace */ this.colorSpace = colorSpace; /** * An object that can be used to store custom data about the texture. It * should not hold references to functions as these will not be cloned. * * @type {Object} */ this.userData = {}; /** * This starts at `0` and counts how many times {@link Texture#needsUpdate} is set to `true`. * * @type {number} * @readonly * @default 0 */ this.version = 0; /** * A callback function, called when the texture is updated (e.g., when * {@link Texture#needsUpdate} has been set to true and then the texture is used). 
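*
* A minimal sketch (when exactly the renderer invokes the callback is an internal detail):
* ```js
* texture.onUpdate = function () {
* 	console.log( 'texture data has been uploaded' );
* };
* ```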
* * @type {?Function} * @default null */ this.onUpdate = null; /** * An optional back reference to the textures render target. * * @type {?(RenderTarget|WebGLRenderTarget)} * @default null */ this.renderTarget = null; /** * Indicates whether a texture belongs to a render target or not. * * @type {boolean} * @readonly * @default false */ this.isRenderTargetTexture = false; /** * Indicates whether this texture should be processed by `PMREMGenerator` or not * (only relevant for render target textures). * * @type {number} * @readonly * @default 0 */ this.pmremVersion = 0; } /** * The image object holding the texture data. * * @type {?Object} */ get image() { return this.source.data; } set image( value = null ) { this.source.data = value; } /** * Updates the texture transformation matrix from the from the properties {@link Texture#offset}, * {@link Texture#repeat}, {@link Texture#rotation}, and {@link Texture#center}. */ updateMatrix() { this.matrix.setUvTransform( this.offset.x, this.offset.y, this.repeat.x, this.repeat.y, this.rotation, this.center.x, this.center.y ); } /** * Returns a new texture with copied values from this instance. * * @return {Texture} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given texture to this instance. * * @param {Texture} source - The texture to copy. * @return {Texture} A reference to this instance. */ copy( source ) { this.name = source.name; this.source = source.source; this.mipmaps = source.mipmaps.slice( 0 ); this.mapping = source.mapping; this.channel = source.channel; this.wrapS = source.wrapS; this.wrapT = source.wrapT; this.magFilter = source.magFilter; this.minFilter = source.minFilter; this.anisotropy = source.anisotropy; this.format = source.format; this.internalFormat = source.internalFormat; this.type = source.type; this.offset.copy( source.offset ); this.repeat.copy( source.repeat ); this.center.copy( source.center ); this.rotation = source.rotation; this.matrixAutoUpdate = source.matrixAutoUpdate; this.matrix.copy( source.matrix ); this.generateMipmaps = source.generateMipmaps; this.premultiplyAlpha = source.premultiplyAlpha; this.flipY = source.flipY; this.unpackAlignment = source.unpackAlignment; this.colorSpace = source.colorSpace; this.renderTarget = source.renderTarget; this.isRenderTargetTexture = source.isRenderTargetTexture; this.userData = JSON.parse( JSON.stringify( source.userData ) ); this.needsUpdate = true; return this; } /** * Serializes the texture into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized texture. * @see {@link ObjectLoader#parse} */ toJSON( meta ) { const isRootObject = ( meta === undefined || typeof meta === 'string' ); if ( ! 
isRootObject && meta.textures[ this.uuid ] !== undefined ) { return meta.textures[ this.uuid ]; } const output = { metadata: { version: 4.6, type: 'Texture', generator: 'Texture.toJSON' }, uuid: this.uuid, name: this.name, image: this.source.toJSON( meta ).uuid, mapping: this.mapping, channel: this.channel, repeat: [ this.repeat.x, this.repeat.y ], offset: [ this.offset.x, this.offset.y ], center: [ this.center.x, this.center.y ], rotation: this.rotation, wrap: [ this.wrapS, this.wrapT ], format: this.format, internalFormat: this.internalFormat, type: this.type, colorSpace: this.colorSpace, minFilter: this.minFilter, magFilter: this.magFilter, anisotropy: this.anisotropy, flipY: this.flipY, generateMipmaps: this.generateMipmaps, premultiplyAlpha: this.premultiplyAlpha, unpackAlignment: this.unpackAlignment }; if ( Object.keys( this.userData ).length > 0 ) output.userData = this.userData; if ( ! isRootObject ) { meta.textures[ this.uuid ] = output; } return output; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. * * @fires Texture#dispose */ dispose() { /** * Fires when the texture has been disposed of. * * @event Texture#dispose * @type {Object} */ this.dispatchEvent( { type: 'dispose' } ); } /** * Transforms the given uv vector with the textures uv transformation matrix. * * @param {Vector2} uv - The uv vector. * @return {Vector2} The transformed uv vector. */ transformUv( uv ) { if ( this.mapping !== UVMapping ) return uv; uv.applyMatrix3( this.matrix ); if ( uv.x < 0 || uv.x > 1 ) { switch ( this.wrapS ) { case RepeatWrapping: uv.x = uv.x - Math.floor( uv.x ); break; case ClampToEdgeWrapping: uv.x = uv.x < 0 ? 0 : 1; break; case MirroredRepeatWrapping: if ( Math.abs( Math.floor( uv.x ) % 2 ) === 1 ) { uv.x = Math.ceil( uv.x ) - uv.x; } else { uv.x = uv.x - Math.floor( uv.x ); } break; } } if ( uv.y < 0 || uv.y > 1 ) { switch ( this.wrapT ) { case RepeatWrapping: uv.y = uv.y - Math.floor( uv.y ); break; case ClampToEdgeWrapping: uv.y = uv.y < 0 ? 0 : 1; break; case MirroredRepeatWrapping: if ( Math.abs( Math.floor( uv.y ) % 2 ) === 1 ) { uv.y = Math.ceil( uv.y ) - uv.y; } else { uv.y = uv.y - Math.floor( uv.y ); } break; } } if ( this.flipY ) { uv.y = 1 - uv.y; } return uv; } /** * Setting this property to `true` indicates the engine the texture * must be updated in the next render. This triggers a texture upload * to the GPU and ensures correct texture parameter configuration. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) { this.version ++; this.source.needsUpdate = true; } } /** * Setting this property to `true` indicates the engine the PMREM * must be regenerated. * * @type {boolean} * @default false * @param {boolean} value */ set needsPMREMUpdate( value ) { if ( value === true ) { this.pmremVersion ++; } } } /** * The default image for all textures. * * @static * @type {?Image} * @default null */ Texture.DEFAULT_IMAGE = null; /** * The default mapping for all textures. * * @static * @type {number} * @default UVMapping */ Texture.DEFAULT_MAPPING = UVMapping; /** * The default anisotropy value for all textures. * * @static * @type {number} * @default 1 */ Texture.DEFAULT_ANISOTROPY = 1; /** * Class representing a 4D vector. A 4D vector is an ordered quadruplet of numbers * (labeled x, y, z and w), which can be used to represent a number of things, such as: * * - A point in 4D space. * - A direction and length in 4D space. 
In three.js the length will * always be the Euclidean distance(straight-line distance) from `(0, 0, 0, 0)` to `(x, y, z, w)` * and the direction is also measured from `(0, 0, 0, 0)` towards `(x, y, z, w)`. * - Any arbitrary ordered quadruplet of numbers. * * There are other things a 4D vector can be used to represent, however these * are the most common uses in *three.js*. * * Iterating through a vector instance will yield its components `(x, y, z, w)` in * the corresponding order. * ```js * const a = new THREE.Vector4( 0, 1, 0, 0 ); * * //no arguments; will be initialised to (0, 0, 0, 1) * const b = new THREE.Vector4( ); * * const d = a.dot( b ); * ``` */ class Vector4 { /** * Constructs a new 4D vector. * * @param {number} [x=0] - The x value of this vector. * @param {number} [y=0] - The y value of this vector. * @param {number} [z=0] - The z value of this vector. * @param {number} [w=1] - The w value of this vector. */ constructor( x = 0, y = 0, z = 0, w = 1 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Vector4.prototype.isVector4 = true; /** * The x value of this vector. * * @type {number} */ this.x = x; /** * The y value of this vector. * * @type {number} */ this.y = y; /** * The z value of this vector. * * @type {number} */ this.z = z; /** * The w value of this vector. * * @type {number} */ this.w = w; } /** * Alias for {@link Vector4#z}. * * @type {number} */ get width() { return this.z; } set width( value ) { this.z = value; } /** * Alias for {@link Vector4#w}. * * @type {number} */ get height() { return this.w; } set height( value ) { this.w = value; } /** * Sets the vector components. * * @param {number} x - The value of the x component. * @param {number} y - The value of the y component. * @param {number} z - The value of the z component. * @param {number} w - The value of the w component. * @return {Vector4} A reference to this vector. */ set( x, y, z, w ) { this.x = x; this.y = y; this.z = z; this.w = w; return this; } /** * Sets the vector components to the same value. * * @param {number} scalar - The value to set for all vector components. * @return {Vector4} A reference to this vector. */ setScalar( scalar ) { this.x = scalar; this.y = scalar; this.z = scalar; this.w = scalar; return this; } /** * Sets the vector's x component to the given value * * @param {number} x - The value to set. * @return {Vector4} A reference to this vector. */ setX( x ) { this.x = x; return this; } /** * Sets the vector's y component to the given value * * @param {number} y - The value to set. * @return {Vector4} A reference to this vector. */ setY( y ) { this.y = y; return this; } /** * Sets the vector's z component to the given value * * @param {number} z - The value to set. * @return {Vector4} A reference to this vector. */ setZ( z ) { this.z = z; return this; } /** * Sets the vector's w component to the given value * * @param {number} w - The value to set. * @return {Vector4} A reference to this vector. */ setW( w ) { this.w = w; return this; } /** * Allows to set a vector component with an index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y, * `2` equals to z, `3` equals to w. * @param {number} value - The value to set. * @return {Vector4} A reference to this vector. 
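*
* For illustration, index-based access makes it easy to loop over all four components:
* ```js
* const v = new THREE.Vector4( 1, 2, 3, 4 );
* for ( let i = 0; i < 4; i ++ ) {
* 	v.setComponent( i, v.getComponent( i ) * 2 );
* }
* // v is now (2, 4, 6, 8)
* ```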
*/ setComponent( index, value ) { switch ( index ) { case 0: this.x = value; break; case 1: this.y = value; break; case 2: this.z = value; break; case 3: this.w = value; break; default: throw new Error( 'index is out of range: ' + index ); } return this; } /** * Returns the value of the vector component which matches the given index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y, * `2` equals to z, `3` equals to w. * @return {number} A vector component value. */ getComponent( index ) { switch ( index ) { case 0: return this.x; case 1: return this.y; case 2: return this.z; case 3: return this.w; default: throw new Error( 'index is out of range: ' + index ); } } /** * Returns a new vector with copied values from this instance. * * @return {Vector4} A clone of this instance. */ clone() { return new this.constructor( this.x, this.y, this.z, this.w ); } /** * Copies the values of the given vector to this instance. * * @param {Vector3|Vector4} v - The vector to copy. * @return {Vector4} A reference to this vector. */ copy( v ) { this.x = v.x; this.y = v.y; this.z = v.z; this.w = ( v.w !== undefined ) ? v.w : 1; return this; } /** * Adds the given vector to this instance. * * @param {Vector4} v - The vector to add. * @return {Vector4} A reference to this vector. */ add( v ) { this.x += v.x; this.y += v.y; this.z += v.z; this.w += v.w; return this; } /** * Adds the given scalar value to all components of this instance. * * @param {number} s - The scalar to add. * @return {Vector4} A reference to this vector. */ addScalar( s ) { this.x += s; this.y += s; this.z += s; this.w += s; return this; } /** * Adds the given vectors and stores the result in this instance. * * @param {Vector4} a - The first vector. * @param {Vector4} b - The second vector. * @return {Vector4} A reference to this vector. */ addVectors( a, b ) { this.x = a.x + b.x; this.y = a.y + b.y; this.z = a.z + b.z; this.w = a.w + b.w; return this; } /** * Adds the given vector scaled by the given factor to this instance. * * @param {Vector4} v - The vector. * @param {number} s - The factor that scales `v`. * @return {Vector4} A reference to this vector. */ addScaledVector( v, s ) { this.x += v.x * s; this.y += v.y * s; this.z += v.z * s; this.w += v.w * s; return this; } /** * Subtracts the given vector from this instance. * * @param {Vector4} v - The vector to subtract. * @return {Vector4} A reference to this vector. */ sub( v ) { this.x -= v.x; this.y -= v.y; this.z -= v.z; this.w -= v.w; return this; } /** * Subtracts the given scalar value from all components of this instance. * * @param {number} s - The scalar to subtract. * @return {Vector4} A reference to this vector. */ subScalar( s ) { this.x -= s; this.y -= s; this.z -= s; this.w -= s; return this; } /** * Subtracts the given vectors and stores the result in this instance. * * @param {Vector4} a - The first vector. * @param {Vector4} b - The second vector. * @return {Vector4} A reference to this vector. */ subVectors( a, b ) { this.x = a.x - b.x; this.y = a.y - b.y; this.z = a.z - b.z; this.w = a.w - b.w; return this; } /** * Multiplies the given vector with this instance. * * @param {Vector4} v - The vector to multiply. * @return {Vector4} A reference to this vector. */ multiply( v ) { this.x *= v.x; this.y *= v.y; this.z *= v.z; this.w *= v.w; return this; } /** * Multiplies the given scalar value with all components of this instance. * * @param {number} scalar - The scalar to multiply. * @return {Vector4} A reference to this vector. 
*/ multiplyScalar( scalar ) { this.x *= scalar; this.y *= scalar; this.z *= scalar; this.w *= scalar; return this; } /** * Multiplies this vector with the given 4x4 matrix. * * @param {Matrix4} m - The 4x4 matrix. * @return {Vector4} A reference to this vector. */ applyMatrix4( m ) { const x = this.x, y = this.y, z = this.z, w = this.w; const e = m.elements; this.x = e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z + e[ 12 ] * w; this.y = e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z + e[ 13 ] * w; this.z = e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z + e[ 14 ] * w; this.w = e[ 3 ] * x + e[ 7 ] * y + e[ 11 ] * z + e[ 15 ] * w; return this; } /** * Divides this instance by the given vector. * * @param {Vector4} v - The vector to divide. * @return {Vector4} A reference to this vector. */ divide( v ) { this.x /= v.x; this.y /= v.y; this.z /= v.z; this.w /= v.w; return this; } /** * Divides this vector by the given scalar. * * @param {number} scalar - The scalar to divide. * @return {Vector4} A reference to this vector. */ divideScalar( scalar ) { return this.multiplyScalar( 1 / scalar ); } /** * Sets the x, y and z components of this * vector to the quaternion's axis and w to the angle. * * @param {Quaternion} q - The Quaternion to set. * @return {Vector4} A reference to this vector. */ setAxisAngleFromQuaternion( q ) { // http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToAngle/index.htm // q is assumed to be normalized this.w = 2 * Math.acos( q.w ); const s = Math.sqrt( 1 - q.w * q.w ); if ( s < 0.0001 ) { this.x = 1; this.y = 0; this.z = 0; } else { this.x = q.x / s; this.y = q.y / s; this.z = q.z / s; } return this; } /** * Sets the x, y and z components of this * vector to the axis of rotation and w to the angle. * * @param {Matrix4} m - A 4x4 matrix of which the upper left 3x3 matrix is a pure rotation matrix. * @return {Vector4} A reference to this vector. 
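*
* A brief sketch, using Matrix4's makeRotationY() purely for illustration:
* ```js
* const m = new THREE.Matrix4().makeRotationY( Math.PI / 2 );
* const axisAngle = new THREE.Vector4().setAxisAngleFromRotationMatrix( m );
* // axisAngle is approximately (0, 1, 0, Math.PI / 2)
* ```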
*/ setAxisAngleFromRotationMatrix( m ) { // http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToAngle/index.htm // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled) let angle, x, y, z; // variables for result const epsilon = 0.01, // margin to allow for rounding errors epsilon2 = 0.1, // margin to distinguish between 0 and 180 degrees te = m.elements, m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ], m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ], m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ]; if ( ( Math.abs( m12 - m21 ) < epsilon ) && ( Math.abs( m13 - m31 ) < epsilon ) && ( Math.abs( m23 - m32 ) < epsilon ) ) { // singularity found // first check for identity matrix which must have +1 for all terms // in leading diagonal and zero in other terms if ( ( Math.abs( m12 + m21 ) < epsilon2 ) && ( Math.abs( m13 + m31 ) < epsilon2 ) && ( Math.abs( m23 + m32 ) < epsilon2 ) && ( Math.abs( m11 + m22 + m33 - 3 ) < epsilon2 ) ) { // this singularity is identity matrix so angle = 0 this.set( 1, 0, 0, 0 ); return this; // zero angle, arbitrary axis } // otherwise this singularity is angle = 180 angle = Math.PI; const xx = ( m11 + 1 ) / 2; const yy = ( m22 + 1 ) / 2; const zz = ( m33 + 1 ) / 2; const xy = ( m12 + m21 ) / 4; const xz = ( m13 + m31 ) / 4; const yz = ( m23 + m32 ) / 4; if ( ( xx > yy ) && ( xx > zz ) ) { // m11 is the largest diagonal term if ( xx < epsilon ) { x = 0; y = 0.707106781; z = 0.707106781; } else { x = Math.sqrt( xx ); y = xy / x; z = xz / x; } } else if ( yy > zz ) { // m22 is the largest diagonal term if ( yy < epsilon ) { x = 0.707106781; y = 0; z = 0.707106781; } else { y = Math.sqrt( yy ); x = xy / y; z = yz / y; } } else { // m33 is the largest diagonal term so base result on this if ( zz < epsilon ) { x = 0.707106781; y = 0.707106781; z = 0; } else { z = Math.sqrt( zz ); x = xz / z; y = yz / z; } } this.set( x, y, z, angle ); return this; // return 180 deg rotation } // as we have reached here there are no singularities so we can handle normally let s = Math.sqrt( ( m32 - m23 ) * ( m32 - m23 ) + ( m13 - m31 ) * ( m13 - m31 ) + ( m21 - m12 ) * ( m21 - m12 ) ); // used to normalize if ( Math.abs( s ) < 0.001 ) s = 1; // prevent divide by zero, should not happen if matrix is orthogonal and should be // caught by singularity test above, but I've left it in just in case this.x = ( m32 - m23 ) / s; this.y = ( m13 - m31 ) / s; this.z = ( m21 - m12 ) / s; this.w = Math.acos( ( m11 + m22 + m33 - 1 ) / 2 ); return this; } /** * Sets the vector components to the position elements of the * given transformation matrix. * * @param {Matrix4} m - The 4x4 matrix. * @return {Vector4} A reference to this vector. */ setFromMatrixPosition( m ) { const e = m.elements; this.x = e[ 12 ]; this.y = e[ 13 ]; this.z = e[ 14 ]; this.w = e[ 15 ]; return this; } /** * If this vector's x, y, z or w value is greater than the given vector's x, y, z or w * value, replace that value with the corresponding min value. * * @param {Vector4} v - The vector. * @return {Vector4} A reference to this vector. */ min( v ) { this.x = Math.min( this.x, v.x ); this.y = Math.min( this.y, v.y ); this.z = Math.min( this.z, v.z ); this.w = Math.min( this.w, v.w ); return this; } /** * If this vector's x, y, z or w value is less than the given vector's x, y, z or w * value, replace that value with the corresponding max value. * * @param {Vector4} v - The vector. * @return {Vector4} A reference to this vector. 
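*
* For example (illustrative values), taking the componentwise maximum:
* ```js
* const a = new THREE.Vector4( 1, 5, 2, 0 );
* const b = new THREE.Vector4( 3, 4, 1, 1 );
* a.max( b ); // a is now (3, 5, 2, 1)
* ```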
*/ max( v ) { this.x = Math.max( this.x, v.x ); this.y = Math.max( this.y, v.y ); this.z = Math.max( this.z, v.z ); this.w = Math.max( this.w, v.w ); return this; } /** * If this vector's x, y, z or w value is greater than the max vector's x, y, z or w * value, it is replaced by the corresponding value. * If this vector's x, y, z or w value is less than the min vector's x, y, z or w value, * it is replaced by the corresponding value. * * @param {Vector4} min - The minimum x, y and z values. * @param {Vector4} max - The maximum x, y and z values in the desired range. * @return {Vector4} A reference to this vector. */ clamp( min, max ) { // assumes min < max, componentwise this.x = clamp( this.x, min.x, max.x ); this.y = clamp( this.y, min.y, max.y ); this.z = clamp( this.z, min.z, max.z ); this.w = clamp( this.w, min.w, max.w ); return this; } /** * If this vector's x, y, z or w values are greater than the max value, they are * replaced by the max value. * If this vector's x, y, z or w values are less than the min value, they are * replaced by the min value. * * @param {number} minVal - The minimum value the components will be clamped to. * @param {number} maxVal - The maximum value the components will be clamped to. * @return {Vector4} A reference to this vector. */ clampScalar( minVal, maxVal ) { this.x = clamp( this.x, minVal, maxVal ); this.y = clamp( this.y, minVal, maxVal ); this.z = clamp( this.z, minVal, maxVal ); this.w = clamp( this.w, minVal, maxVal ); return this; } /** * If this vector's length is greater than the max value, it is replaced by * the max value. * If this vector's length is less than the min value, it is replaced by the * min value. * * @param {number} min - The minimum value the vector length will be clamped to. * @param {number} max - The maximum value the vector length will be clamped to. * @return {Vector4} A reference to this vector. */ clampLength( min, max ) { const length = this.length(); return this.divideScalar( length || 1 ).multiplyScalar( clamp( length, min, max ) ); } /** * The components of this vector are rounded down to the nearest integer value. * * @return {Vector4} A reference to this vector. */ floor() { this.x = Math.floor( this.x ); this.y = Math.floor( this.y ); this.z = Math.floor( this.z ); this.w = Math.floor( this.w ); return this; } /** * The components of this vector are rounded up to the nearest integer value. * * @return {Vector4} A reference to this vector. */ ceil() { this.x = Math.ceil( this.x ); this.y = Math.ceil( this.y ); this.z = Math.ceil( this.z ); this.w = Math.ceil( this.w ); return this; } /** * The components of this vector are rounded to the nearest integer value * * @return {Vector4} A reference to this vector. */ round() { this.x = Math.round( this.x ); this.y = Math.round( this.y ); this.z = Math.round( this.z ); this.w = Math.round( this.w ); return this; } /** * The components of this vector are rounded towards zero (up if negative, * down if positive) to an integer value. * * @return {Vector4} A reference to this vector. */ roundToZero() { this.x = Math.trunc( this.x ); this.y = Math.trunc( this.y ); this.z = Math.trunc( this.z ); this.w = Math.trunc( this.w ); return this; } /** * Inverts this vector - i.e. sets x = -x, y = -y, z = -z, w = -w. * * @return {Vector4} A reference to this vector. */ negate() { this.x = - this.x; this.y = - this.y; this.z = - this.z; this.w = - this.w; return this; } /** * Calculates the dot product of the given vector with this instance. 
* * @param {Vector4} v - The vector to compute the dot product with. * @return {number} The result of the dot product. */ dot( v ) { return this.x * v.x + this.y * v.y + this.z * v.z + this.w * v.w; } /** * Computes the square of the Euclidean length (straight-line length) from * (0, 0, 0, 0) to (x, y, z, w). If you are comparing the lengths of vectors, you should * compare the length squared instead as it is slightly more efficient to calculate. * * @return {number} The square length of this vector. */ lengthSq() { return this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w; } /** * Computes the Euclidean length (straight-line length) from (0, 0, 0, 0) to (x, y, z, w). * * @return {number} The length of this vector. */ length() { return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w ); } /** * Computes the Manhattan length of this vector. * * @return {number} The length of this vector. */ manhattanLength() { return Math.abs( this.x ) + Math.abs( this.y ) + Math.abs( this.z ) + Math.abs( this.w ); } /** * Converts this vector to a unit vector - that is, sets it equal to a vector * with the same direction as this one, but with a vector length of `1`. * * @return {Vector4} A reference to this vector. */ normalize() { return this.divideScalar( this.length() || 1 ); } /** * Sets this vector to a vector with the same direction as this one, but * with the specified length. * * @param {number} length - The new length of this vector. * @return {Vector4} A reference to this vector. */ setLength( length ) { return this.normalize().multiplyScalar( length ); } /** * Linearly interpolates between the given vector and this instance, where * alpha is the percent distance along the line - alpha = 0 will be this * vector, and alpha = 1 will be the given one. * * @param {Vector4} v - The vector to interpolate towards. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. * @return {Vector4} A reference to this vector. */ lerp( v, alpha ) { this.x += ( v.x - this.x ) * alpha; this.y += ( v.y - this.y ) * alpha; this.z += ( v.z - this.z ) * alpha; this.w += ( v.w - this.w ) * alpha; return this; } /** * Linearly interpolates between the given vectors, where alpha is the percent * distance along the line - alpha = 0 will be first vector, and alpha = 1 will * be the second one. The result is stored in this instance. * * @param {Vector4} v1 - The first vector. * @param {Vector4} v2 - The second vector. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. * @return {Vector4} A reference to this vector. */ lerpVectors( v1, v2, alpha ) { this.x = v1.x + ( v2.x - v1.x ) * alpha; this.y = v1.y + ( v2.y - v1.y ) * alpha; this.z = v1.z + ( v2.z - v1.z ) * alpha; this.w = v1.w + ( v2.w - v1.w ) * alpha; return this; } /** * Returns `true` if this vector is equal with the given one. * * @param {Vector4} v - The vector to test for equality. * @return {boolean} Whether this vector is equal with the given one. */ equals( v ) { return ( ( v.x === this.x ) && ( v.y === this.y ) && ( v.z === this.z ) && ( v.w === this.w ) ); } /** * Sets this vector's x value to be `array[ offset ]`, y value to be `array[ offset + 1 ]`, * z value to be `array[ offset + 2 ]`, w value to be `array[ offset + 3 ]`. * * @param {Array} array - An array holding the vector component values. * @param {number} [offset=0] - The offset into the array. * @return {Vector4} A reference to this vector. 
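*
* A short sketch (the array contents are illustrative):
* ```js
* const data = [ 0, 0, 0, 1, 10, 20, 30, 1 ]; // two xyzw tuples
* const v = new THREE.Vector4().fromArray( data, 4 ); // v is (10, 20, 30, 1)
* ```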
*/ fromArray( array, offset = 0 ) { this.x = array[ offset ]; this.y = array[ offset + 1 ]; this.z = array[ offset + 2 ]; this.w = array[ offset + 3 ]; return this; } /** * Writes the components of this vector to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the vector components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The vector components. */ toArray( array = [], offset = 0 ) { array[ offset ] = this.x; array[ offset + 1 ] = this.y; array[ offset + 2 ] = this.z; array[ offset + 3 ] = this.w; return array; } /** * Sets the components of this vector from the given buffer attribute. * * @param {BufferAttribute} attribute - The buffer attribute holding vector data. * @param {number} index - The index into the attribute. * @return {Vector4} A reference to this vector. */ fromBufferAttribute( attribute, index ) { this.x = attribute.getX( index ); this.y = attribute.getY( index ); this.z = attribute.getZ( index ); this.w = attribute.getW( index ); return this; } /** * Sets each component of this vector to a pseudo-random value between `0` and * `1`, excluding `1`. * * @return {Vector4} A reference to this vector. */ random() { this.x = Math.random(); this.y = Math.random(); this.z = Math.random(); this.w = Math.random(); return this; } *[ Symbol.iterator ]() { yield this.x; yield this.y; yield this.z; yield this.w; } } /** * A render target is a buffer where the video card draws pixels for a scene * that is being rendered in the background. It is used in different effects, * such as applying postprocessing to a rendered image before displaying it * on the screen. * * @augments EventDispatcher */ class RenderTarget extends EventDispatcher { /** * Render target options. * * @typedef {Object} RenderTarget~Options * @property {boolean} [generateMipmaps=false] - Whether to generate mipmaps or not. * @property {number} [magFilter=LinearFilter] - The mag filter. * @property {number} [minFilter=LinearFilter] - The min filter. * @property {number} [format=RGBAFormat] - The texture format. * @property {number} [type=UnsignedByteType] - The texture type. * @property {?string} [internalFormat=null] - The texture's internal format. * @property {number} [wrapS=ClampToEdgeWrapping] - The texture's uv wrapping mode. * @property {number} [wrapT=ClampToEdgeWrapping] - The texture's uv wrapping mode. * @property {number} [anisotropy=1] - The texture's anisotropy value. * @property {string} [colorSpace=NoColorSpace] - The texture's color space. * @property {boolean} [depthBuffer=true] - Whether to allocate a depth buffer or not. * @property {boolean} [stencilBuffer=false] - Whether to allocate a stencil buffer or not. * @property {boolean} [resolveDepthBuffer=true] - Whether to resolve the depth buffer or not. * @property {boolean} [resolveStencilBuffer=true] - Whether to resolve the stencil buffer or not. * @property {?Texture} [depthTexture=null] - Reference to a depth texture. * @property {number} [samples=0] - The MSAA samples count. * @property {number} [count=1] - Defines the number of color attachments . Must be at least `1`. */ /** * Constructs a new render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, options = {} ) { super(); /** * This flag can be used for type testing. 
* * @type {boolean} * @readonly * @default true */ this.isRenderTarget = true; /** * The width of the render target. * * @type {number} * @default 1 */ this.width = width; /** * The height of the render target. * * @type {number} * @default 1 */ this.height = height; /** * The depth of the render target. * * @type {number} * @default 1 */ this.depth = 1; /** * A rectangular area inside the render target's viewport. Fragments that are * outside the area will be discarded. * * @type {Vector4} * @default (0,0,width,height) */ this.scissor = new Vector4( 0, 0, width, height ); /** * Indicates whether the scissor test should be enabled when rendering into * this render target or not. * * @type {boolean} * @default false */ this.scissorTest = false; /** * A rectangular area representing the render target's viewport. * * @type {Vector4} * @default (0,0,width,height) */ this.viewport = new Vector4( 0, 0, width, height ); const image = { width: width, height: height, depth: 1 }; options = Object.assign( { generateMipmaps: false, internalFormat: null, minFilter: LinearFilter, depthBuffer: true, stencilBuffer: false, resolveDepthBuffer: true, resolveStencilBuffer: true, depthTexture: null, samples: 0, count: 1 }, options ); const texture = new Texture( image, options.mapping, options.wrapS, options.wrapT, options.magFilter, options.minFilter, options.format, options.type, options.anisotropy, options.colorSpace ); texture.flipY = false; texture.generateMipmaps = options.generateMipmaps; texture.internalFormat = options.internalFormat; /** * An array of textures. Each color attachment is represented as a separate texture. * Has at least a single entry for the default color attachment. * * @type {Array} */ this.textures = []; const count = options.count; for ( let i = 0; i < count; i ++ ) { this.textures[ i ] = texture.clone(); this.textures[ i ].isRenderTargetTexture = true; this.textures[ i ].renderTarget = this; } /** * Whether to allocate a depth buffer or not. * * @type {boolean} * @default true */ this.depthBuffer = options.depthBuffer; /** * Whether to allocate a stencil buffer or not. * * @type {boolean} * @default false */ this.stencilBuffer = options.stencilBuffer; /** * Whether to resolve the depth buffer or not. * * @type {boolean} * @default true */ this.resolveDepthBuffer = options.resolveDepthBuffer; /** * Whether to resolve the stencil buffer or not. * * @type {boolean} * @default true */ this.resolveStencilBuffer = options.resolveStencilBuffer; this._depthTexture = options.depthTexture; /** * The number of MSAA samples. * * A value of `0` disables MSAA. * * @type {number} * @default 0 */ this.samples = options.samples; } /** * The texture representing the default color attachment. * * @type {Texture} */ get texture() { return this.textures[ 0 ]; } set texture( value ) { this.textures[ 0 ] = value; } set depthTexture( current ) { if ( this._depthTexture !== null ) this._depthTexture.renderTarget = null; if ( current !== null ) current.renderTarget = this; this._depthTexture = current; } /** * Instead of saving the depth in a renderbuffer, a texture * can be used instead which is useful for further processing * e.g. in context of post-processing. * * @type {?DepthTexture} * @default null */ get depthTexture() { return this._depthTexture; } /** * Sets the size of this render target. * * @param {number} width - The width. * @param {number} height - The height. * @param {number} [depth=1] - The depth. 
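*
* A typical use is resizing an existing target, for example a WebGLRenderTarget
* (sketch only; the sizes and the browser globals are illustrative):
* ```js
* const target = new THREE.WebGLRenderTarget( 1024, 1024 );
* target.setSize( window.innerWidth, window.innerHeight );
* ```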
*/ setSize( width, height, depth = 1 ) { if ( this.width !== width || this.height !== height || this.depth !== depth ) { this.width = width; this.height = height; this.depth = depth; for ( let i = 0, il = this.textures.length; i < il; i ++ ) { this.textures[ i ].image.width = width; this.textures[ i ].image.height = height; this.textures[ i ].image.depth = depth; } this.dispose(); } this.viewport.set( 0, 0, width, height ); this.scissor.set( 0, 0, width, height ); } /** * Returns a new render target with copied values from this instance. * * @return {RenderTarget} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the settings of the given render target. This is a structural copy so * no resources are shared between render targets after the copy. That includes * all MRT textures and the depth texture. * * @param {RenderTarget} source - The render target to copy. * @return {RenderTarget} A reference to this instance. */ copy( source ) { this.width = source.width; this.height = source.height; this.depth = source.depth; this.scissor.copy( source.scissor ); this.scissorTest = source.scissorTest; this.viewport.copy( source.viewport ); this.textures.length = 0; for ( let i = 0, il = source.textures.length; i < il; i ++ ) { this.textures[ i ] = source.textures[ i ].clone(); this.textures[ i ].isRenderTargetTexture = true; this.textures[ i ].renderTarget = this; // ensure image object is not shared, see #20328 const image = Object.assign( {}, source.textures[ i ].image ); this.textures[ i ].source = new Source( image ); } this.depthBuffer = source.depthBuffer; this.stencilBuffer = source.stencilBuffer; this.resolveDepthBuffer = source.resolveDepthBuffer; this.resolveStencilBuffer = source.resolveStencilBuffer; if ( source.depthTexture !== null ) this.depthTexture = source.depthTexture.clone(); this.samples = source.samples; return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. * * @fires RenderTarget#dispose */ dispose() { this.dispatchEvent( { type: 'dispose' } ); } } /** * A render target used in context of {@link WebGLRenderer}. * * @augments RenderTarget */ class WebGLRenderTarget extends RenderTarget { /** * Constructs a new render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, options = {} ) { super( width, height, options ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isWebGLRenderTarget = true; } } /** * Creates an array of textures directly from raw buffer data. * * @augments Texture */ class DataArrayTexture extends Texture { /** * Constructs a new data array texture. * * @param {?TypedArray} [data=null] - The buffer data. * @param {number} [width=1] - The width of the texture. * @param {number} [height=1] - The height of the texture. * @param {number} [depth=1] - The depth of the texture. */ constructor( data = null, width = 1, height = 1, depth = 1 ) { super( null ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isDataArrayTexture = true; /** * The image definition of a data texture.
* * @type {{data:TypedArray,width:number,height:number,depth:number}} */ this.image = { data, width, height, depth }; /** * How the texture is sampled when a texel covers more than one pixel. * * Overwritten and set to `NearestFilter` by default. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.magFilter = NearestFilter; /** * How the texture is sampled when a texel covers less than one pixel. * * Overwritten and set to `NearestFilter` by default. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.minFilter = NearestFilter; /** * This defines how the texture is wrapped in the depth and corresponds to * *W* in UVW mapping. * * @type {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} * @default ClampToEdgeWrapping */ this.wrapR = ClampToEdgeWrapping; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.flipY = false; /** * Specifies the alignment requirements for the start of each pixel row in memory. * * Overwritten and set to `1` by default. * * @type {number} * @default 1 */ this.unpackAlignment = 1; /** * A set of all layers which need to be updated in the texture. * * @type {Set} */ this.layerUpdates = new Set(); } /** * Describes that a specific layer of the texture needs to be updated. * Normally when {@link Texture#needsUpdate} is set to `true`, the * entire data texture array is sent to the GPU. Marking specific * layers will only transmit subsets of all mipmaps associated with a * specific depth in the array which is often much more performant. * * @param {number} layerIndex - The layer index that should be updated. */ addLayerUpdate( layerIndex ) { this.layerUpdates.add( layerIndex ); } /** * Resets the layer updates registry. */ clearLayerUpdates() { this.layerUpdates.clear(); } } /** * An array render target used in context of {@link WebGLRenderer}. * * @augments WebGLRenderTarget */ class WebGLArrayRenderTarget extends WebGLRenderTarget { /** * Constructs a new array render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {number} [depth=1] - The depth of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, depth = 1, options = {} ) { super( width, height, options ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isWebGLArrayRenderTarget = true; this.depth = depth; /** * Overwritten with a different texture type. * * @type {DataArrayTexture} */ this.texture = new DataArrayTexture( null, width, height, depth ); this.texture.isRenderTargetTexture = true; } } /** * Creates a three-dimensional texture from raw data, with parameters to * divide it into width, height, and depth. * * @augments Texture */ class Data3DTexture extends Texture { /** * Constructs a new 3D data texture. * * @param {?TypedArray} [data=null] - The buffer data.
* @param {number} [width=1] - The width of the texture. * @param {number} [height=1] - The height of the texture. * @param {number} [depth=1] - The depth of the texture. */ constructor( data = null, width = 1, height = 1, depth = 1 ) { // We're going to add .setXXX() methods for setting properties later. // Users can still set properties on Data3DTexture directly. // // const texture = new THREE.Data3DTexture( data, width, height, depth ); // texture.anisotropy = 16; // // See #14839 super( null ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isData3DTexture = true; /** * The image definition of a data texture. * * @type {{data:TypedArray,width:number,height:number,depth:number}} */ this.image = { data, width, height, depth }; /** * How the texture is sampled when a texel covers more than one pixel. * * Overwritten and set to `NearestFilter` by default. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.magFilter = NearestFilter; /** * How the texture is sampled when a texel covers less than one pixel. * * Overwritten and set to `NearestFilter` by default. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.minFilter = NearestFilter; /** * This defines how the texture is wrapped in the depth and corresponds to * *W* in UVW mapping. * * @type {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} * @default ClampToEdgeWrapping */ this.wrapR = ClampToEdgeWrapping; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.flipY = false; /** * Specifies the alignment requirements for the start of each pixel row in memory. * * Overwritten and set to `1` by default. * * @type {number} * @default 1 */ this.unpackAlignment = 1; } } /** * A 3D render target used in context of {@link WebGLRenderer}. * * @augments WebGLRenderTarget */ class WebGL3DRenderTarget extends WebGLRenderTarget { /** * Constructs a new 3D render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {number} [depth=1] - The depth of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, depth = 1, options = {} ) { super( width, height, options ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isWebGL3DRenderTarget = true; this.depth = depth; /** * Overwritten with a different texture type. * * @type {Data3DTexture} */ this.texture = new Data3DTexture( null, width, height, depth ); this.texture.isRenderTargetTexture = true; } } /** * Class for representing a Quaternion. Quaternions are used in three.js to represent rotations. * * Iterating through a quaternion instance will yield its components `(x, y, z, w)` in * the corresponding order. * * Note that three.js expects Quaternions to be normalized.
* ```js * const quaternion = new THREE.Quaternion(); * quaternion.setFromAxisAngle( new THREE.Vector3( 0, 1, 0 ), Math.PI / 2 ); * * const vector = new THREE.Vector3( 1, 0, 0 ); * vector.applyQuaternion( quaternion ); * ``` */ class Quaternion { /** * Constructs a new quaternion. * * @param {number} [x=0] - The x value of this quaternion. * @param {number} [y=0] - The y value of this quaternion. * @param {number} [z=0] - The z value of this quaternion. * @param {number} [w=1] - The w value of this quaternion. */ constructor( x = 0, y = 0, z = 0, w = 1 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isQuaternion = true; this._x = x; this._y = y; this._z = z; this._w = w; } /** * Interpolates between two quaternions via SLERP. This implementation assumes the * quaternion data are managed in flat arrays. * * @param {Array} dst - The destination array. * @param {number} dstOffset - An offset into the destination array. * @param {Array} src0 - The source array of the first quaternion. * @param {number} srcOffset0 - An offset into the first source array. * @param {Array} src1 - The source array of the second quaternion. * @param {number} srcOffset1 - An offset into the second source array. * @param {number} t - The interpolation factor in the range `[0,1]`. * @see {@link Quaternion#slerp} */ static slerpFlat( dst, dstOffset, src0, srcOffset0, src1, srcOffset1, t ) { // fuzz-free, array-based Quaternion SLERP operation let x0 = src0[ srcOffset0 + 0 ], y0 = src0[ srcOffset0 + 1 ], z0 = src0[ srcOffset0 + 2 ], w0 = src0[ srcOffset0 + 3 ]; const x1 = src1[ srcOffset1 + 0 ], y1 = src1[ srcOffset1 + 1 ], z1 = src1[ srcOffset1 + 2 ], w1 = src1[ srcOffset1 + 3 ]; if ( t === 0 ) { dst[ dstOffset + 0 ] = x0; dst[ dstOffset + 1 ] = y0; dst[ dstOffset + 2 ] = z0; dst[ dstOffset + 3 ] = w0; return; } if ( t === 1 ) { dst[ dstOffset + 0 ] = x1; dst[ dstOffset + 1 ] = y1; dst[ dstOffset + 2 ] = z1; dst[ dstOffset + 3 ] = w1; return; } if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) { let s = 1 - t; const cos = x0 * x1 + y0 * y1 + z0 * z1 + w0 * w1, dir = ( cos >= 0 ? 1 : -1 ), sqrSin = 1 - cos * cos; // Skip the Slerp for tiny steps to avoid numeric problems: if ( sqrSin > Number.EPSILON ) { const sin = Math.sqrt( sqrSin ), len = Math.atan2( sin, cos * dir ); s = Math.sin( s * len ) / sin; t = Math.sin( t * len ) / sin; } const tDir = t * dir; x0 = x0 * s + x1 * tDir; y0 = y0 * s + y1 * tDir; z0 = z0 * s + z1 * tDir; w0 = w0 * s + w1 * tDir; // Normalize in case we just did a lerp: if ( s === 1 - t ) { const f = 1 / Math.sqrt( x0 * x0 + y0 * y0 + z0 * z0 + w0 * w0 ); x0 *= f; y0 *= f; z0 *= f; w0 *= f; } } dst[ dstOffset ] = x0; dst[ dstOffset + 1 ] = y0; dst[ dstOffset + 2 ] = z0; dst[ dstOffset + 3 ] = w0; } /** * Multiplies two quaternions. This implementation assumes the quaternion data are managed * in flat arrays. * * @param {Array} dst - The destination array. * @param {number} dstOffset - An offset into the destination array. * @param {Array} src0 - The source array of the first quaternion. * @param {number} srcOffset0 - An offset into the first source array. * @param {Array} src1 - The source array of the second quaternion. * @param {number} srcOffset1 - An offset into the second source array. * @return {Array} The destination array. * @see {@link Quaternion#multiplyQuaternions}. 
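*
* A flat-array sketch (the data is illustrative; the second quaternion is built via `setFromAxisAngle()`):
* ```js
* const a = [ 0, 0, 0, 1 ]; // identity
* const b = new THREE.Quaternion().setFromAxisAngle( new THREE.Vector3( 0, 1, 0 ), Math.PI / 2 ).toArray();
* const dst = new Array( 4 );
* THREE.Quaternion.multiplyQuaternionsFlat( dst, 0, a, 0, b, 0 ); // dst equals b
* ```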
*/ static multiplyQuaternionsFlat( dst, dstOffset, src0, srcOffset0, src1, srcOffset1 ) { const x0 = src0[ srcOffset0 ]; const y0 = src0[ srcOffset0 + 1 ]; const z0 = src0[ srcOffset0 + 2 ]; const w0 = src0[ srcOffset0 + 3 ]; const x1 = src1[ srcOffset1 ]; const y1 = src1[ srcOffset1 + 1 ]; const z1 = src1[ srcOffset1 + 2 ]; const w1 = src1[ srcOffset1 + 3 ]; dst[ dstOffset ] = x0 * w1 + w0 * x1 + y0 * z1 - z0 * y1; dst[ dstOffset + 1 ] = y0 * w1 + w0 * y1 + z0 * x1 - x0 * z1; dst[ dstOffset + 2 ] = z0 * w1 + w0 * z1 + x0 * y1 - y0 * x1; dst[ dstOffset + 3 ] = w0 * w1 - x0 * x1 - y0 * y1 - z0 * z1; return dst; } /** * The x value of this quaternion. * * @type {number} * @default 0 */ get x() { return this._x; } set x( value ) { this._x = value; this._onChangeCallback(); } /** * The y value of this quaternion. * * @type {number} * @default 0 */ get y() { return this._y; } set y( value ) { this._y = value; this._onChangeCallback(); } /** * The z value of this quaternion. * * @type {number} * @default 0 */ get z() { return this._z; } set z( value ) { this._z = value; this._onChangeCallback(); } /** * The w value of this quaternion. * * @type {number} * @default 1 */ get w() { return this._w; } set w( value ) { this._w = value; this._onChangeCallback(); } /** * Sets the quaternion components. * * @param {number} x - The x value of this quaternion. * @param {number} y - The y value of this quaternion. * @param {number} z - The z value of this quaternion. * @param {number} w - The w value of this quaternion. * @return {Quaternion} A reference to this quaternion. */ set( x, y, z, w ) { this._x = x; this._y = y; this._z = z; this._w = w; this._onChangeCallback(); return this; } /** * Returns a new quaternion with copied values from this instance. * * @return {Quaternion} A clone of this instance. */ clone() { return new this.constructor( this._x, this._y, this._z, this._w ); } /** * Copies the values of the given quaternion to this instance. * * @param {Quaternion} quaternion - The quaternion to copy. * @return {Quaternion} A reference to this quaternion. */ copy( quaternion ) { this._x = quaternion.x; this._y = quaternion.y; this._z = quaternion.z; this._w = quaternion.w; this._onChangeCallback(); return this; } /** * Sets this quaternion from the rotation specified by the given * Euler angles. * * @param {Euler} euler - The Euler angles. * @param {boolean} [update=true] - Whether the internal `onChange` callback should be executed or not. * @return {Quaternion} A reference to this quaternion. 
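*
* A minimal sketch (illustrative angles, default 'XYZ' order):
* ```js
* const euler = new THREE.Euler( Math.PI / 2, 0, 0, 'XYZ' );
* const q = new THREE.Quaternion().setFromEuler( euler ); // 90 degrees about the X axis
* ```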
*/ setFromEuler( euler, update = true ) { const x = euler._x, y = euler._y, z = euler._z, order = euler._order; // http://www.mathworks.com/matlabcentral/fileexchange/ // 20696-function-to-convert-between-dcm-euler-angles-quaternions-and-euler-vectors/ // content/SpinCalc.m const cos = Math.cos; const sin = Math.sin; const c1 = cos( x / 2 ); const c2 = cos( y / 2 ); const c3 = cos( z / 2 ); const s1 = sin( x / 2 ); const s2 = sin( y / 2 ); const s3 = sin( z / 2 ); switch ( order ) { case 'XYZ': this._x = s1 * c2 * c3 + c1 * s2 * s3; this._y = c1 * s2 * c3 - s1 * c2 * s3; this._z = c1 * c2 * s3 + s1 * s2 * c3; this._w = c1 * c2 * c3 - s1 * s2 * s3; break; case 'YXZ': this._x = s1 * c2 * c3 + c1 * s2 * s3; this._y = c1 * s2 * c3 - s1 * c2 * s3; this._z = c1 * c2 * s3 - s1 * s2 * c3; this._w = c1 * c2 * c3 + s1 * s2 * s3; break; case 'ZXY': this._x = s1 * c2 * c3 - c1 * s2 * s3; this._y = c1 * s2 * c3 + s1 * c2 * s3; this._z = c1 * c2 * s3 + s1 * s2 * c3; this._w = c1 * c2 * c3 - s1 * s2 * s3; break; case 'ZYX': this._x = s1 * c2 * c3 - c1 * s2 * s3; this._y = c1 * s2 * c3 + s1 * c2 * s3; this._z = c1 * c2 * s3 - s1 * s2 * c3; this._w = c1 * c2 * c3 + s1 * s2 * s3; break; case 'YZX': this._x = s1 * c2 * c3 + c1 * s2 * s3; this._y = c1 * s2 * c3 + s1 * c2 * s3; this._z = c1 * c2 * s3 - s1 * s2 * c3; this._w = c1 * c2 * c3 - s1 * s2 * s3; break; case 'XZY': this._x = s1 * c2 * c3 - c1 * s2 * s3; this._y = c1 * s2 * c3 - s1 * c2 * s3; this._z = c1 * c2 * s3 + s1 * s2 * c3; this._w = c1 * c2 * c3 + s1 * s2 * s3; break; default: console.warn( 'THREE.Quaternion: .setFromEuler() encountered an unknown order: ' + order ); } if ( update === true ) this._onChangeCallback(); return this; } /** * Sets this quaternion from the given axis and angle. * * @param {Vector3} axis - The normalized axis. * @param {number} angle - The angle in radians. * @return {Quaternion} A reference to this quaternion. */ setFromAxisAngle( axis, angle ) { // http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm const halfAngle = angle / 2, s = Math.sin( halfAngle ); this._x = axis.x * s; this._y = axis.y * s; this._z = axis.z * s; this._w = Math.cos( halfAngle ); this._onChangeCallback(); return this; } /** * Sets this quaternion from the given rotation matrix. * * @param {Matrix4} m - A 4x4 matrix of which the upper 3x3 of matrix is a pure rotation matrix (i.e. unscaled). * @return {Quaternion} A reference to this quaternion. 
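*
* A small sketch (assumes `Matrix4.makeRotationAxis()`; axis and angle are illustrative):
* ```js
* const m = new THREE.Matrix4().makeRotationAxis( new THREE.Vector3( 1, 0, 0 ), Math.PI / 4 );
* const q = new THREE.Quaternion().setFromRotationMatrix( m );
* ```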
*/ setFromRotationMatrix( m ) { // http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled) const te = m.elements, m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ], m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ], m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ], trace = m11 + m22 + m33; if ( trace > 0 ) { const s = 0.5 / Math.sqrt( trace + 1.0 ); this._w = 0.25 / s; this._x = ( m32 - m23 ) * s; this._y = ( m13 - m31 ) * s; this._z = ( m21 - m12 ) * s; } else if ( m11 > m22 && m11 > m33 ) { const s = 2.0 * Math.sqrt( 1.0 + m11 - m22 - m33 ); this._w = ( m32 - m23 ) / s; this._x = 0.25 * s; this._y = ( m12 + m21 ) / s; this._z = ( m13 + m31 ) / s; } else if ( m22 > m33 ) { const s = 2.0 * Math.sqrt( 1.0 + m22 - m11 - m33 ); this._w = ( m13 - m31 ) / s; this._x = ( m12 + m21 ) / s; this._y = 0.25 * s; this._z = ( m23 + m32 ) / s; } else { const s = 2.0 * Math.sqrt( 1.0 + m33 - m11 - m22 ); this._w = ( m21 - m12 ) / s; this._x = ( m13 + m31 ) / s; this._y = ( m23 + m32 ) / s; this._z = 0.25 * s; } this._onChangeCallback(); return this; } /** * Sets this quaternion to the rotation required to rotate the direction vector * `vFrom` to the direction vector `vTo`. * * @param {Vector3} vFrom - The first (normalized) direction vector. * @param {Vector3} vTo - The second (normalized) direction vector. * @return {Quaternion} A reference to this quaternion. */ setFromUnitVectors( vFrom, vTo ) { // assumes direction vectors vFrom and vTo are normalized let r = vFrom.dot( vTo ) + 1; if ( r < Number.EPSILON ) { // vFrom and vTo point in opposite directions r = 0; if ( Math.abs( vFrom.x ) > Math.abs( vFrom.z ) ) { this._x = - vFrom.y; this._y = vFrom.x; this._z = 0; this._w = r; } else { this._x = 0; this._y = - vFrom.z; this._z = vFrom.y; this._w = r; } } else { // crossVectors( vFrom, vTo ); // inlined to avoid cyclic dependency on Vector3 this._x = vFrom.y * vTo.z - vFrom.z * vTo.y; this._y = vFrom.z * vTo.x - vFrom.x * vTo.z; this._z = vFrom.x * vTo.y - vFrom.y * vTo.x; this._w = r; } return this.normalize(); } /** * Returns the angle between this quaternion and the given one in radians. * * @param {Quaternion} q - The quaternion to compute the angle with. * @return {number} The angle in radians. */ angleTo( q ) { return 2 * Math.acos( Math.abs( clamp( this.dot( q ), -1, 1 ) ) ); } /** * Rotates this quaternion by a given angular step to the given quaternion. * The method ensures that the final quaternion will not overshoot `q`. * * @param {Quaternion} q - The target quaternion. * @param {number} step - The angular step in radians. * @return {Quaternion} A reference to this quaternion. */ rotateTowards( q, step ) { const angle = this.angleTo( q ); if ( angle === 0 ) return this; const t = Math.min( 1, step / angle ); this.slerp( q, t ); return this; } /** * Sets this quaternion to the identity quaternion; that is, to the * quaternion that represents "no rotation". * * @return {Quaternion} A reference to this quaternion. */ identity() { return this.set( 0, 0, 0, 1 ); } /** * Inverts this quaternion via {@link Quaternion#conjugate}. The * quaternion is assumed to have unit length. * * @return {Quaternion} A reference to this quaternion. */ invert() { return this.conjugate(); } /** * Returns the rotational conjugate of this quaternion. The conjugate of a * quaternion represents the same rotation in the opposite direction about * the rotational axis. * * @return {Quaternion} A reference to this quaternion. 
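*
* For a unit quaternion the conjugate acts as the inverse (illustrative sketch):
* ```js
* const q = new THREE.Quaternion().setFromAxisAngle( new THREE.Vector3( 0, 0, 1 ), Math.PI / 2 );
* const inv = q.clone().conjugate();
* q.multiply( inv ); // approximately the identity (0, 0, 0, 1)
* ```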
*/ conjugate() { this._x *= -1; this._y *= -1; this._z *= -1; this._onChangeCallback(); return this; } /** * Calculates the dot product of this quaternion and the given one. * * @param {Quaternion} v - The quaternion to compute the dot product with. * @return {number} The result of the dot product. */ dot( v ) { return this._x * v._x + this._y * v._y + this._z * v._z + this._w * v._w; } /** * Computes the squared Euclidean length (straight-line length) of this quaternion, * considered as a 4 dimensional vector. This can be useful if you are comparing the * lengths of two quaternions, as this is a slightly more efficient calculation than * {@link Quaternion#length}. * * @return {number} The squared Euclidean length. */ lengthSq() { return this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w; } /** * Computes the Euclidean length (straight-line length) of this quaternion, * considered as a 4 dimensional vector. * * @return {number} The Euclidean length. */ length() { return Math.sqrt( this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w ); } /** * Normalizes this quaternion - that is, calculates the quaternion that performs * the same rotation as this one, but has a length equal to `1`. * * @return {Quaternion} A reference to this quaternion. */ normalize() { let l = this.length(); if ( l === 0 ) { this._x = 0; this._y = 0; this._z = 0; this._w = 1; } else { l = 1 / l; this._x = this._x * l; this._y = this._y * l; this._z = this._z * l; this._w = this._w * l; } this._onChangeCallback(); return this; } /** * Multiplies this quaternion by the given one. * * @param {Quaternion} q - The quaternion. * @return {Quaternion} A reference to this quaternion. */ multiply( q ) { return this.multiplyQuaternions( this, q ); } /** * Pre-multiplies this quaternion by the given one. * * @param {Quaternion} q - The quaternion. * @return {Quaternion} A reference to this quaternion. */ premultiply( q ) { return this.multiplyQuaternions( q, this ); } /** * Multiplies the given quaternions and stores the result in this instance. * * @param {Quaternion} a - The first quaternion. * @param {Quaternion} b - The second quaternion. * @return {Quaternion} A reference to this quaternion. */ multiplyQuaternions( a, b ) { // from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm const qax = a._x, qay = a._y, qaz = a._z, qaw = a._w; const qbx = b._x, qby = b._y, qbz = b._z, qbw = b._w; this._x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby; this._y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz; this._z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx; this._w = qaw * qbw - qax * qbx - qay * qby - qaz * qbz; this._onChangeCallback(); return this; } /** * Performs a spherical linear interpolation between quaternions. * * @param {Quaternion} qb - The target quaternion. * @param {number} t - The interpolation factor in the closed interval `[0, 1]`. * @return {Quaternion} A reference to this quaternion.
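*
* A minimal sketch (the target rotation is illustrative):
* ```js
* const target = new THREE.Quaternion().setFromAxisAngle( new THREE.Vector3( 0, 1, 0 ), Math.PI );
* const q = new THREE.Quaternion(); // identity
* q.slerp( target, 0.5 ); // now a 90 degree rotation about the Y axis
* ```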
*/ slerp( qb, t ) { if ( t === 0 ) return this; if ( t === 1 ) return this.copy( qb ); const x = this._x, y = this._y, z = this._z, w = this._w; // http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/ let cosHalfTheta = w * qb._w + x * qb._x + y * qb._y + z * qb._z; if ( cosHalfTheta < 0 ) { this._w = - qb._w; this._x = - qb._x; this._y = - qb._y; this._z = - qb._z; cosHalfTheta = - cosHalfTheta; } else { this.copy( qb ); } if ( cosHalfTheta >= 1.0 ) { this._w = w; this._x = x; this._y = y; this._z = z; return this; } const sqrSinHalfTheta = 1.0 - cosHalfTheta * cosHalfTheta; if ( sqrSinHalfTheta <= Number.EPSILON ) { const s = 1 - t; this._w = s * w + t * this._w; this._x = s * x + t * this._x; this._y = s * y + t * this._y; this._z = s * z + t * this._z; this.normalize(); // normalize calls _onChangeCallback() return this; } const sinHalfTheta = Math.sqrt( sqrSinHalfTheta ); const halfTheta = Math.atan2( sinHalfTheta, cosHalfTheta ); const ratioA = Math.sin( ( 1 - t ) * halfTheta ) / sinHalfTheta, ratioB = Math.sin( t * halfTheta ) / sinHalfTheta; this._w = ( w * ratioA + this._w * ratioB ); this._x = ( x * ratioA + this._x * ratioB ); this._y = ( y * ratioA + this._y * ratioB ); this._z = ( z * ratioA + this._z * ratioB ); this._onChangeCallback(); return this; } /** * Performs a spherical linear interpolation between the given quaternions * and stores the result in this quaternion. * * @param {Quaternion} qa - The source quaternion. * @param {Quaternion} qb - The target quaternion. * @param {number} t - The interpolation factor in the closed interval `[0, 1]`. * @return {Quaternion} A reference to this quaternion. */ slerpQuaternions( qa, qb, t ) { return this.copy( qa ).slerp( qb, t ); } /** * Sets this quaternion to a uniformly random, normalized quaternion. * * @return {Quaternion} A reference to this quaternion. */ random() { // Ken Shoemake // Uniform random rotations // D. Kirk, editor, Graphics Gems III, pages 124-132. Academic Press, New York, 1992. const theta1 = 2 * Math.PI * Math.random(); const theta2 = 2 * Math.PI * Math.random(); const x0 = Math.random(); const r1 = Math.sqrt( 1 - x0 ); const r2 = Math.sqrt( x0 ); return this.set( r1 * Math.sin( theta1 ), r1 * Math.cos( theta1 ), r2 * Math.sin( theta2 ), r2 * Math.cos( theta2 ), ); } /** * Returns `true` if this quaternion is equal with the given one. * * @param {Quaternion} quaternion - The quaternion to test for equality. * @return {boolean} Whether this quaternion is equal with the given one. */ equals( quaternion ) { return ( quaternion._x === this._x ) && ( quaternion._y === this._y ) && ( quaternion._z === this._z ) && ( quaternion._w === this._w ); } /** * Sets this quaternion's components from the given array. * * @param {Array} array - An array holding the quaternion component values. * @param {number} [offset=0] - The offset into the array. * @return {Quaternion} A reference to this quaternion. */ fromArray( array, offset = 0 ) { this._x = array[ offset ]; this._y = array[ offset + 1 ]; this._z = array[ offset + 2 ]; this._w = array[ offset + 3 ]; this._onChangeCallback(); return this; } /** * Writes the components of this quaternion to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the quaternion components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The quaternion components. 
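*
* A short sketch writing into a preallocated buffer (buffer size and offset are illustrative):
* ```js
* const buffer = new Float32Array( 8 );
* new THREE.Quaternion( 0, 0, 0, 1 ).toArray( buffer, 4 ); // buffer[ 4 ]..buffer[ 7 ] = 0, 0, 0, 1
* ```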
*/ toArray( array = [], offset = 0 ) { array[ offset ] = this._x; array[ offset + 1 ] = this._y; array[ offset + 2 ] = this._z; array[ offset + 3 ] = this._w; return array; } /** * Sets the components of this quaternion from the given buffer attribute. * * @param {BufferAttribute} attribute - The buffer attribute holding quaternion data. * @param {number} index - The index into the attribute. * @return {Quaternion} A reference to this quaternion. */ fromBufferAttribute( attribute, index ) { this._x = attribute.getX( index ); this._y = attribute.getY( index ); this._z = attribute.getZ( index ); this._w = attribute.getW( index ); this._onChangeCallback(); return this; } /** * This methods defines the serialization result of this class. Returns the * numerical elements of this quaternion in an array of format `[x, y, z, w]`. * * @return {Array} The serialized quaternion. */ toJSON() { return this.toArray(); } _onChange( callback ) { this._onChangeCallback = callback; return this; } _onChangeCallback() {} *[ Symbol.iterator ]() { yield this._x; yield this._y; yield this._z; yield this._w; } } /** * Class representing a 3D vector. A 3D vector is an ordered triplet of numbers * (labeled x, y and z), which can be used to represent a number of things, such as: * * - A point in 3D space. * - A direction and length in 3D space. In three.js the length will * always be the Euclidean distance(straight-line distance) from `(0, 0, 0)` to `(x, y, z)` * and the direction is also measured from `(0, 0, 0)` towards `(x, y, z)`. * - Any arbitrary ordered triplet of numbers. * * There are other things a 3D vector can be used to represent, such as * momentum vectors and so on, however these are the most * common uses in three.js. * * Iterating through a vector instance will yield its components `(x, y, z)` in * the corresponding order. * ```js * const a = new THREE.Vector3( 0, 1, 0 ); * * //no arguments; will be initialised to (0, 0, 0) * const b = new THREE.Vector3( ); * * const d = a.distanceTo( b ); * ``` */ class Vector3 { /** * Constructs a new 3D vector. * * @param {number} [x=0] - The x value of this vector. * @param {number} [y=0] - The y value of this vector. * @param {number} [z=0] - The z value of this vector. */ constructor( x = 0, y = 0, z = 0 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Vector3.prototype.isVector3 = true; /** * The x value of this vector. * * @type {number} */ this.x = x; /** * The y value of this vector. * * @type {number} */ this.y = y; /** * The z value of this vector. * * @type {number} */ this.z = z; } /** * Sets the vector components. * * @param {number} x - The value of the x component. * @param {number} y - The value of the y component. * @param {number} z - The value of the z component. * @return {Vector3} A reference to this vector. */ set( x, y, z ) { if ( z === undefined ) z = this.z; // sprite.scale.set(x,y) this.x = x; this.y = y; this.z = z; return this; } /** * Sets the vector components to the same value. * * @param {number} scalar - The value to set for all vector components. * @return {Vector3} A reference to this vector. */ setScalar( scalar ) { this.x = scalar; this.y = scalar; this.z = scalar; return this; } /** * Sets the vector's x component to the given value * * @param {number} x - The value to set. * @return {Vector3} A reference to this vector. */ setX( x ) { this.x = x; return this; } /** * Sets the vector's y component to the given value * * @param {number} y - The value to set. 
* @return {Vector3} A reference to this vector. */ setY( y ) { this.y = y; return this; } /** * Sets the vector's z component to the given value * * @param {number} z - The value to set. * @return {Vector3} A reference to this vector. */ setZ( z ) { this.z = z; return this; } /** * Allows to set a vector component with an index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y, `2` equals to z. * @param {number} value - The value to set. * @return {Vector3} A reference to this vector. */ setComponent( index, value ) { switch ( index ) { case 0: this.x = value; break; case 1: this.y = value; break; case 2: this.z = value; break; default: throw new Error( 'index is out of range: ' + index ); } return this; } /** * Returns the value of the vector component which matches the given index. * * @param {number} index - The component index. `0` equals to x, `1` equals to y, `2` equals to z. * @return {number} A vector component value. */ getComponent( index ) { switch ( index ) { case 0: return this.x; case 1: return this.y; case 2: return this.z; default: throw new Error( 'index is out of range: ' + index ); } } /** * Returns a new vector with copied values from this instance. * * @return {Vector3} A clone of this instance. */ clone() { return new this.constructor( this.x, this.y, this.z ); } /** * Copies the values of the given vector to this instance. * * @param {Vector3} v - The vector to copy. * @return {Vector3} A reference to this vector. */ copy( v ) { this.x = v.x; this.y = v.y; this.z = v.z; return this; } /** * Adds the given vector to this instance. * * @param {Vector3} v - The vector to add. * @return {Vector3} A reference to this vector. */ add( v ) { this.x += v.x; this.y += v.y; this.z += v.z; return this; } /** * Adds the given scalar value to all components of this instance. * * @param {number} s - The scalar to add. * @return {Vector3} A reference to this vector. */ addScalar( s ) { this.x += s; this.y += s; this.z += s; return this; } /** * Adds the given vectors and stores the result in this instance. * * @param {Vector3} a - The first vector. * @param {Vector3} b - The second vector. * @return {Vector3} A reference to this vector. */ addVectors( a, b ) { this.x = a.x + b.x; this.y = a.y + b.y; this.z = a.z + b.z; return this; } /** * Adds the given vector scaled by the given factor to this instance. * * @param {Vector3|Vector4} v - The vector. * @param {number} s - The factor that scales `v`. * @return {Vector3} A reference to this vector. */ addScaledVector( v, s ) { this.x += v.x * s; this.y += v.y * s; this.z += v.z * s; return this; } /** * Subtracts the given vector from this instance. * * @param {Vector3} v - The vector to subtract. * @return {Vector3} A reference to this vector. */ sub( v ) { this.x -= v.x; this.y -= v.y; this.z -= v.z; return this; } /** * Subtracts the given scalar value from all components of this instance. * * @param {number} s - The scalar to subtract. * @return {Vector3} A reference to this vector. */ subScalar( s ) { this.x -= s; this.y -= s; this.z -= s; return this; } /** * Subtracts the given vectors and stores the result in this instance. * * @param {Vector3} a - The first vector. * @param {Vector3} b - The second vector. * @return {Vector3} A reference to this vector. */ subVectors( a, b ) { this.x = a.x - b.x; this.y = a.y - b.y; this.z = a.z - b.z; return this; } /** * Multiplies the given vector with this instance. * * @param {Vector3} v - The vector to multiply. 
* @return {Vector3} A reference to this vector. */ multiply( v ) { this.x *= v.x; this.y *= v.y; this.z *= v.z; return this; } /** * Multiplies the given scalar value with all components of this instance. * * @param {number} scalar - The scalar to multiply. * @return {Vector3} A reference to this vector. */ multiplyScalar( scalar ) { this.x *= scalar; this.y *= scalar; this.z *= scalar; return this; } /** * Multiplies the given vectors and stores the result in this instance. * * @param {Vector3} a - The first vector. * @param {Vector3} b - The second vector. * @return {Vector3} A reference to this vector. */ multiplyVectors( a, b ) { this.x = a.x * b.x; this.y = a.y * b.y; this.z = a.z * b.z; return this; } /** * Applies the given Euler rotation to this vector. * * @param {Euler} euler - The Euler angles. * @return {Vector3} A reference to this vector. */ applyEuler( euler ) { return this.applyQuaternion( _quaternion$4.setFromEuler( euler ) ); } /** * Applies a rotation specified by an axis and an angle to this vector. * * @param {Vector3} axis - A normalized vector representing the rotation axis. * @param {number} angle - The angle in radians. * @return {Vector3} A reference to this vector. */ applyAxisAngle( axis, angle ) { return this.applyQuaternion( _quaternion$4.setFromAxisAngle( axis, angle ) ); } /** * Multiplies this vector with the given 3x3 matrix. * * @param {Matrix3} m - The 3x3 matrix. * @return {Vector3} A reference to this vector. */ applyMatrix3( m ) { const x = this.x, y = this.y, z = this.z; const e = m.elements; this.x = e[ 0 ] * x + e[ 3 ] * y + e[ 6 ] * z; this.y = e[ 1 ] * x + e[ 4 ] * y + e[ 7 ] * z; this.z = e[ 2 ] * x + e[ 5 ] * y + e[ 8 ] * z; return this; } /** * Multiplies this vector by the given normal matrix and normalizes * the result. * * @param {Matrix3} m - The normal matrix. * @return {Vector3} A reference to this vector. */ applyNormalMatrix( m ) { return this.applyMatrix3( m ).normalize(); } /** * Multiplies this vector (with an implicit 1 in the 4th dimension) by m, and * divides by perspective. * * @param {Matrix4} m - The matrix to apply. * @return {Vector3} A reference to this vector. */ applyMatrix4( m ) { const x = this.x, y = this.y, z = this.z; const e = m.elements; const w = 1 / ( e[ 3 ] * x + e[ 7 ] * y + e[ 11 ] * z + e[ 15 ] ); this.x = ( e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z + e[ 12 ] ) * w; this.y = ( e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z + e[ 13 ] ) * w; this.z = ( e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z + e[ 14 ] ) * w; return this; } /** * Applies the given Quaternion to this vector. * * @param {Quaternion} q - The Quaternion. * @return {Vector3} A reference to this vector. */ applyQuaternion( q ) { // quaternion q is assumed to have unit length const vx = this.x, vy = this.y, vz = this.z; const qx = q.x, qy = q.y, qz = q.z, qw = q.w; // t = 2 * cross( q.xyz, v ); const tx = 2 * ( qy * vz - qz * vy ); const ty = 2 * ( qz * vx - qx * vz ); const tz = 2 * ( qx * vy - qy * vx ); // v + q.w * t + cross( q.xyz, t ); this.x = vx + qw * tx + qy * tz - qz * ty; this.y = vy + qw * ty + qz * tx - qx * tz; this.z = vz + qw * tz + qx * ty - qy * tx; return this; } /** * Projects this vector from world space into the camera's normalized * device coordinate (NDC) space. * * @param {Camera} camera - The camera. * @return {Vector3} A reference to this vector. 
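*
* A common use is mapping a world-space point to screen coordinates
* (sketch only; `camera` and `renderer` are assumed to exist with up-to-date matrices):
* ```js
* const p = new THREE.Vector3( 0, 1, 0 ).project( camera ); // NDC, each component in [-1, 1]
* const x = ( p.x + 1 ) / 2 * renderer.domElement.width;
* const y = ( 1 - p.y ) / 2 * renderer.domElement.height;
* ```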
*/ project( camera ) { return this.applyMatrix4( camera.matrixWorldInverse ).applyMatrix4( camera.projectionMatrix ); } /** * Unprojects this vector from the camera's normalized device coordinate (NDC) * space into world space. * * @param {Camera} camera - The camera. * @return {Vector3} A reference to this vector. */ unproject( camera ) { return this.applyMatrix4( camera.projectionMatrixInverse ).applyMatrix4( camera.matrixWorld ); } /** * Transforms the direction of this vector by a matrix (the upper left 3 x 3 * subset of the given 4x4 matrix and then normalizes the result. * * @param {Matrix4} m - The matrix. * @return {Vector3} A reference to this vector. */ transformDirection( m ) { // input: THREE.Matrix4 affine matrix // vector interpreted as a direction const x = this.x, y = this.y, z = this.z; const e = m.elements; this.x = e[ 0 ] * x + e[ 4 ] * y + e[ 8 ] * z; this.y = e[ 1 ] * x + e[ 5 ] * y + e[ 9 ] * z; this.z = e[ 2 ] * x + e[ 6 ] * y + e[ 10 ] * z; return this.normalize(); } /** * Divides this instance by the given vector. * * @param {Vector3} v - The vector to divide. * @return {Vector3} A reference to this vector. */ divide( v ) { this.x /= v.x; this.y /= v.y; this.z /= v.z; return this; } /** * Divides this vector by the given scalar. * * @param {number} scalar - The scalar to divide. * @return {Vector3} A reference to this vector. */ divideScalar( scalar ) { return this.multiplyScalar( 1 / scalar ); } /** * If this vector's x, y or z value is greater than the given vector's x, y or z * value, replace that value with the corresponding min value. * * @param {Vector3} v - The vector. * @return {Vector3} A reference to this vector. */ min( v ) { this.x = Math.min( this.x, v.x ); this.y = Math.min( this.y, v.y ); this.z = Math.min( this.z, v.z ); return this; } /** * If this vector's x, y or z value is less than the given vector's x, y or z * value, replace that value with the corresponding max value. * * @param {Vector3} v - The vector. * @return {Vector3} A reference to this vector. */ max( v ) { this.x = Math.max( this.x, v.x ); this.y = Math.max( this.y, v.y ); this.z = Math.max( this.z, v.z ); return this; } /** * If this vector's x, y or z value is greater than the max vector's x, y or z * value, it is replaced by the corresponding value. * If this vector's x, y or z value is less than the min vector's x, y or z value, * it is replaced by the corresponding value. * * @param {Vector3} min - The minimum x, y and z values. * @param {Vector3} max - The maximum x, y and z values in the desired range. * @return {Vector3} A reference to this vector. */ clamp( min, max ) { // assumes min < max, componentwise this.x = clamp( this.x, min.x, max.x ); this.y = clamp( this.y, min.y, max.y ); this.z = clamp( this.z, min.z, max.z ); return this; } /** * If this vector's x, y or z values are greater than the max value, they are * replaced by the max value. * If this vector's x, y or z values are less than the min value, they are * replaced by the min value. * * @param {number} minVal - The minimum value the components will be clamped to. * @param {number} maxVal - The maximum value the components will be clamped to. * @return {Vector3} A reference to this vector. */ clampScalar( minVal, maxVal ) { this.x = clamp( this.x, minVal, maxVal ); this.y = clamp( this.y, minVal, maxVal ); this.z = clamp( this.z, minVal, maxVal ); return this; } /** * If this vector's length is greater than the max value, it is replaced by * the max value. 
* If this vector's length is less than the min value, it is replaced by the * min value. * * @param {number} min - The minimum value the vector length will be clamped to. * @param {number} max - The maximum value the vector length will be clamped to. * @return {Vector3} A reference to this vector. */ clampLength( min, max ) { const length = this.length(); return this.divideScalar( length || 1 ).multiplyScalar( clamp( length, min, max ) ); } /** * The components of this vector are rounded down to the nearest integer value. * * @return {Vector3} A reference to this vector. */ floor() { this.x = Math.floor( this.x ); this.y = Math.floor( this.y ); this.z = Math.floor( this.z ); return this; } /** * The components of this vector are rounded up to the nearest integer value. * * @return {Vector3} A reference to this vector. */ ceil() { this.x = Math.ceil( this.x ); this.y = Math.ceil( this.y ); this.z = Math.ceil( this.z ); return this; } /** * The components of this vector are rounded to the nearest integer value * * @return {Vector3} A reference to this vector. */ round() { this.x = Math.round( this.x ); this.y = Math.round( this.y ); this.z = Math.round( this.z ); return this; } /** * The components of this vector are rounded towards zero (up if negative, * down if positive) to an integer value. * * @return {Vector3} A reference to this vector. */ roundToZero() { this.x = Math.trunc( this.x ); this.y = Math.trunc( this.y ); this.z = Math.trunc( this.z ); return this; } /** * Inverts this vector - i.e. sets x = -x, y = -y and z = -z. * * @return {Vector3} A reference to this vector. */ negate() { this.x = - this.x; this.y = - this.y; this.z = - this.z; return this; } /** * Calculates the dot product of the given vector with this instance. * * @param {Vector3} v - The vector to compute the dot product with. * @return {number} The result of the dot product. */ dot( v ) { return this.x * v.x + this.y * v.y + this.z * v.z; } // TODO lengthSquared? /** * Computes the square of the Euclidean length (straight-line length) from * (0, 0, 0) to (x, y, z). If you are comparing the lengths of vectors, you should * compare the length squared instead as it is slightly more efficient to calculate. * * @return {number} The square length of this vector. */ lengthSq() { return this.x * this.x + this.y * this.y + this.z * this.z; } /** * Computes the Euclidean length (straight-line length) from (0, 0, 0) to (x, y, z). * * @return {number} The length of this vector. */ length() { return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z ); } /** * Computes the Manhattan length of this vector. * * @return {number} The length of this vector. */ manhattanLength() { return Math.abs( this.x ) + Math.abs( this.y ) + Math.abs( this.z ); } /** * Converts this vector to a unit vector - that is, sets it equal to a vector * with the same direction as this one, but with a vector length of `1`. * * @return {Vector3} A reference to this vector. */ normalize() { return this.divideScalar( this.length() || 1 ); } /** * Sets this vector to a vector with the same direction as this one, but * with the specified length. * * @param {number} length - The new length of this vector. * @return {Vector3} A reference to this vector. */ setLength( length ) { return this.normalize().multiplyScalar( length ); } /** * Linearly interpolates between the given vector and this instance, where * alpha is the percent distance along the line - alpha = 0 will be this * vector, and alpha = 1 will be the given one. 
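*
* A small sketch with illustrative values:
* ```js
* const a = new THREE.Vector3( 0, 0, 0 );
* const b = new THREE.Vector3( 10, 0, 0 );
* a.lerp( b, 0.25 ); // a is now ( 2.5, 0, 0 ); b is unchanged
* ```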
* * @param {Vector3} v - The vector to interpolate towards. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. * @return {Vector3} A reference to this vector. */ lerp( v, alpha ) { this.x += ( v.x - this.x ) * alpha; this.y += ( v.y - this.y ) * alpha; this.z += ( v.z - this.z ) * alpha; return this; } /** * Linearly interpolates between the given vectors, where alpha is the percent * distance along the line - alpha = 0 will be first vector, and alpha = 1 will * be the second one. The result is stored in this instance. * * @param {Vector3} v1 - The first vector. * @param {Vector3} v2 - The second vector. * @param {number} alpha - The interpolation factor, typically in the closed interval `[0, 1]`. * @return {Vector3} A reference to this vector. */ lerpVectors( v1, v2, alpha ) { this.x = v1.x + ( v2.x - v1.x ) * alpha; this.y = v1.y + ( v2.y - v1.y ) * alpha; this.z = v1.z + ( v2.z - v1.z ) * alpha; return this; } /** * Calculates the cross product of the given vector with this instance. * * @param {Vector3} v - The vector to compute the cross product with. * @return {Vector3} The result of the cross product. */ cross( v ) { return this.crossVectors( this, v ); } /** * Calculates the cross product of the given vectors and stores the result * in this instance. * * @param {Vector3} a - The first vector. * @param {Vector3} b - The second vector. * @return {Vector3} A reference to this vector. */ crossVectors( a, b ) { const ax = a.x, ay = a.y, az = a.z; const bx = b.x, by = b.y, bz = b.z; this.x = ay * bz - az * by; this.y = az * bx - ax * bz; this.z = ax * by - ay * bx; return this; } /** * Projects this vector onto the given one. * * @param {Vector3} v - The vector to project to. * @return {Vector3} A reference to this vector. */ projectOnVector( v ) { const denominator = v.lengthSq(); if ( denominator === 0 ) return this.set( 0, 0, 0 ); const scalar = v.dot( this ) / denominator; return this.copy( v ).multiplyScalar( scalar ); } /** * Projects this vector onto a plane by subtracting this * vector projected onto the plane's normal from this vector. * * @param {Vector3} planeNormal - The plane normal. * @return {Vector3} A reference to this vector. */ projectOnPlane( planeNormal ) { _vector$c.copy( this ).projectOnVector( planeNormal ); return this.sub( _vector$c ); } /** * Reflects this vector off a plane orthogonal to the given normal vector. * * @param {Vector3} normal - The (normalized) normal vector. * @return {Vector3} A reference to this vector. */ reflect( normal ) { return this.sub( _vector$c.copy( normal ).multiplyScalar( 2 * this.dot( normal ) ) ); } /** * Returns the angle between the given vector and this instance in radians. * * @param {Vector3} v - The vector to compute the angle with. * @return {number} The angle in radians. */ angleTo( v ) { const denominator = Math.sqrt( this.lengthSq() * v.lengthSq() ); if ( denominator === 0 ) return Math.PI / 2; const theta = this.dot( v ) / denominator; // clamp, to handle numerical problems return Math.acos( clamp( theta, -1, 1 ) ); } /** * Computes the distance from the given vector to this instance. * * @param {Vector3} v - The vector to compute the distance to. * @return {number} The distance. */ distanceTo( v ) { return Math.sqrt( this.distanceToSquared( v ) ); } /** * Computes the squared distance from the given vector to this instance. 
* If you are just comparing the distance with another distance, you should compare * the distance squared instead as it is slightly more efficient to calculate. * * @param {Vector3} v - The vector to compute the squared distance to. * @return {number} The squared distance. */ distanceToSquared( v ) { const dx = this.x - v.x, dy = this.y - v.y, dz = this.z - v.z; return dx * dx + dy * dy + dz * dz; } /** * Computes the Manhattan distance from the given vector to this instance. * * @param {Vector3} v - The vector to compute the Manhattan distance to. * @return {number} The Manhattan distance. */ manhattanDistanceTo( v ) { return Math.abs( this.x - v.x ) + Math.abs( this.y - v.y ) + Math.abs( this.z - v.z ); } /** * Sets the vector components from the given spherical coordinates. * * @param {Spherical} s - The spherical coordinates. * @return {Vector3} A reference to this vector. */ setFromSpherical( s ) { return this.setFromSphericalCoords( s.radius, s.phi, s.theta ); } /** * Sets the vector components from the given spherical coordinates. * * @param {number} radius - The radius. * @param {number} phi - The phi angle in radians. * @param {number} theta - The theta angle in radians. * @return {Vector3} A reference to this vector. */ setFromSphericalCoords( radius, phi, theta ) { const sinPhiRadius = Math.sin( phi ) * radius; this.x = sinPhiRadius * Math.sin( theta ); this.y = Math.cos( phi ) * radius; this.z = sinPhiRadius * Math.cos( theta ); return this; } /** * Sets the vector components from the given cylindrical coordinates. * * @param {Cylindrical} c - The cylindrical coordinates. * @return {Vector3} A reference to this vector. */ setFromCylindrical( c ) { return this.setFromCylindricalCoords( c.radius, c.theta, c.y ); } /** * Sets the vector components from the given cylindrical coordinates. * * @param {number} radius - The radius. * @param {number} theta - The theta angle in radians. * @param {number} y - The y value. * @return {Vector3} A reference to this vector. */ setFromCylindricalCoords( radius, theta, y ) { this.x = radius * Math.sin( theta ); this.y = y; this.z = radius * Math.cos( theta ); return this; } /** * Sets the vector components to the position elements of the * given transformation matrix. * * @param {Matrix4} m - The 4x4 matrix. * @return {Vector3} A reference to this vector. */ setFromMatrixPosition( m ) { const e = m.elements; this.x = e[ 12 ]; this.y = e[ 13 ]; this.z = e[ 14 ]; return this; } /** * Sets the vector components to the scale elements of the * given transformation matrix. * * @param {Matrix4} m - The 4x4 matrix. * @return {Vector3} A reference to this vector. */ setFromMatrixScale( m ) { const sx = this.setFromMatrixColumn( m, 0 ).length(); const sy = this.setFromMatrixColumn( m, 1 ).length(); const sz = this.setFromMatrixColumn( m, 2 ).length(); this.x = sx; this.y = sy; this.z = sz; return this; } /** * Sets the vector components from the specified matrix column. * * @param {Matrix4} m - The 4x4 matrix. * @param {number} index - The column index. * @return {Vector3} A reference to this vector. */ setFromMatrixColumn( m, index ) { return this.fromArray( m.elements, index * 4 ); } /** * Sets the vector components from the specified matrix column. * * @param {Matrix3} m - The 3x3 matrix. * @param {number} index - The column index. * @return {Vector3} A reference to this vector. */ setFromMatrix3Column( m, index ) { return this.fromArray( m.elements, index * 3 ); } /** * Sets the vector components from the given Euler angles. 
* * @param {Euler} e - The Euler angles to set. * @return {Vector3} A reference to this vector. */ setFromEuler( e ) { this.x = e._x; this.y = e._y; this.z = e._z; return this; } /** * Sets the vector components from the RGB components of the * given color. * * @param {Color} c - The color to set. * @return {Vector3} A reference to this vector. */ setFromColor( c ) { this.x = c.r; this.y = c.g; this.z = c.b; return this; } /** * Returns `true` if this vector is equal with the given one. * * @param {Vector3} v - The vector to test for equality. * @return {boolean} Whether this vector is equal with the given one. */ equals( v ) { return ( ( v.x === this.x ) && ( v.y === this.y ) && ( v.z === this.z ) ); } /** * Sets this vector's x value to be `array[ offset ]`, y value to be `array[ offset + 1 ]` * and z value to be `array[ offset + 2 ]`. * * @param {Array} array - An array holding the vector component values. * @param {number} [offset=0] - The offset into the array. * @return {Vector3} A reference to this vector. */ fromArray( array, offset = 0 ) { this.x = array[ offset ]; this.y = array[ offset + 1 ]; this.z = array[ offset + 2 ]; return this; } /** * Writes the components of this vector to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the vector components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The vector components. */ toArray( array = [], offset = 0 ) { array[ offset ] = this.x; array[ offset + 1 ] = this.y; array[ offset + 2 ] = this.z; return array; } /** * Sets the components of this vector from the given buffer attribute. * * @param {BufferAttribute} attribute - The buffer attribute holding vector data. * @param {number} index - The index into the attribute. * @return {Vector3} A reference to this vector. */ fromBufferAttribute( attribute, index ) { this.x = attribute.getX( index ); this.y = attribute.getY( index ); this.z = attribute.getZ( index ); return this; } /** * Sets each component of this vector to a pseudo-random value between `0` and * `1`, excluding `1`. * * @return {Vector3} A reference to this vector. */ random() { this.x = Math.random(); this.y = Math.random(); this.z = Math.random(); return this; } /** * Sets this vector to a uniformly random point on a unit sphere. * * @return {Vector3} A reference to this vector. */ randomDirection() { // https://mathworld.wolfram.com/SpherePointPicking.html const theta = Math.random() * Math.PI * 2; const u = Math.random() * 2 - 1; const c = Math.sqrt( 1 - u * u ); this.x = c * Math.cos( theta ); this.y = u; this.z = c * Math.sin( theta ); return this; } *[ Symbol.iterator ]() { yield this.x; yield this.y; yield this.z; } } const _vector$c = /*@__PURE__*/ new Vector3(); const _quaternion$4 = /*@__PURE__*/ new Quaternion(); /** * Represents an axis-aligned bounding box (AABB) in 3D space. */ class Box3 { /** * Constructs a new bounding box. * * @param {Vector3} [min=(Infinity,Infinity,Infinity)] - A vector representing the lower boundary of the box. * @param {Vector3} [max=(-Infinity,-Infinity,-Infinity)] - A vector representing the upper boundary of the box. */ constructor( min = new Vector3( + Infinity, + Infinity, + Infinity ), max = new Vector3( - Infinity, - Infinity, - Infinity ) ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBox3 = true; /** * The lower boundary of the box. 
* * @type {Vector3} */ this.min = min; /** * The upper boundary of the box. * * @type {Vector3} */ this.max = max; } /** * Sets the lower and upper boundaries of this box. * Please note that this method only copies the values from the given objects. * * @param {Vector3} min - The lower boundary of the box. * @param {Vector3} max - The upper boundary of the box. * @return {Box3} A reference to this bounding box. */ set( min, max ) { this.min.copy( min ); this.max.copy( max ); return this; } /** * Sets the upper and lower bounds of this box so it encloses the position data * in the given array. * * @param {Array} array - An array holding 3D position data. * @return {Box3} A reference to this bounding box. */ setFromArray( array ) { this.makeEmpty(); for ( let i = 0, il = array.length; i < il; i += 3 ) { this.expandByPoint( _vector$b.fromArray( array, i ) ); } return this; } /** * Sets the upper and lower bounds of this box so it encloses the position data * in the given buffer attribute. * * @param {BufferAttribute} attribute - A buffer attribute holding 3D position data. * @return {Box3} A reference to this bounding box. */ setFromBufferAttribute( attribute ) { this.makeEmpty(); for ( let i = 0, il = attribute.count; i < il; i ++ ) { this.expandByPoint( _vector$b.fromBufferAttribute( attribute, i ) ); } return this; } /** * Sets the upper and lower bounds of this box so it encloses the position data * in the given array. * * @param {Array} points - An array holding 3D position data as instances of {@link Vector3}. * @return {Box3} A reference to this bounding box. */ setFromPoints( points ) { this.makeEmpty(); for ( let i = 0, il = points.length; i < il; i ++ ) { this.expandByPoint( points[ i ] ); } return this; } /** * Centers this box on the given center vector and sets this box's width, height and * depth to the given size values. * * @param {Vector3} center - The center of the box. * @param {Vector3} size - The x, y and z dimensions of the box. * @return {Box3} A reference to this bounding box. */ setFromCenterAndSize( center, size ) { const halfSize = _vector$b.copy( size ).multiplyScalar( 0.5 ); this.min.copy( center ).sub( halfSize ); this.max.copy( center ).add( halfSize ); return this; } /** * Computes the world-axis-aligned bounding box for the given 3D object * (including its children), accounting for the object's, and children's, * world transforms. The function may result in a larger box than strictly necessary. * * @param {Object3D} object - The 3D object to compute the bounding box for. * @param {boolean} [precise=false] - If set to `true`, the method computes the smallest * world-axis-aligned bounding box at the expense of more computation. * @return {Box3} A reference to this bounding box. */ setFromObject( object, precise = false ) { this.makeEmpty(); return this.expandByObject( object, precise ); } /** * Returns a new box with copied values from this instance. * * @return {Box3} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given box to this instance. * * @param {Box3} box - The box to copy. * @return {Box3} A reference to this bounding box. */ copy( box ) { this.min.copy( box.min ); this.max.copy( box.max ); return this; } /** * Makes this box empty, which means it encloses a zero space in 3D. * * @return {Box3} A reference to this bounding box.
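*
* A short sketch of the empty-box convention used throughout this class (the default
* +Infinity/-Infinity bounds make the first expanded point become both min and max):
* ```js
* const box = new THREE.Box3(); // starts empty
* box.expandByPoint( new THREE.Vector3( 1, 2, 3 ) );
* box.expandByPoint( new THREE.Vector3( -1, 0, 0 ) );
* console.log( box.isEmpty() ); // false
* box.makeEmpty();
* console.log( box.isEmpty() ); // true
* ```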
*/ makeEmpty() { this.min.x = this.min.y = this.min.z = + Infinity; this.max.x = this.max.y = this.max.z = - Infinity; return this; } /** * Returns true if this box includes zero points within its bounds. * Note that a box with equal lower and upper bounds still includes one * point, the one both bounds share. * * @return {boolean} Whether this box is empty or not. */ isEmpty() { // this is a more robust check for empty than ( volume <= 0 ) because volume can get positive with two negative axes return ( this.max.x < this.min.x ) || ( this.max.y < this.min.y ) || ( this.max.z < this.min.z ); } /** * Returns the center point of this box. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The center point. */ getCenter( target ) { return this.isEmpty() ? target.set( 0, 0, 0 ) : target.addVectors( this.min, this.max ).multiplyScalar( 0.5 ); } /** * Returns the dimensions of this box. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The size. */ getSize( target ) { return this.isEmpty() ? target.set( 0, 0, 0 ) : target.subVectors( this.max, this.min ); } /** * Expands the boundaries of this box to include the given point. * * @param {Vector3} point - The point that should be included by the bounding box. * @return {Box3} A reference to this bounding box. */ expandByPoint( point ) { this.min.min( point ); this.max.max( point ); return this; } /** * Expands this box equilaterally by the given vector. The width of this * box will be expanded by the x component of the vector in both * directions. The height of this box will be expanded by the y component of * the vector in both directions. The depth of this box will be * expanded by the z component of the vector in both directions. * * @param {Vector3} vector - The vector that should expand the bounding box. * @return {Box3} A reference to this bounding box. */ expandByVector( vector ) { this.min.sub( vector ); this.max.add( vector ); return this; } /** * Expands each dimension of the box by the given scalar. If negative, the * dimensions of the box will be contracted. * * @param {number} scalar - The scalar value that should expand the bounding box. * @return {Box3} A reference to this bounding box. */ expandByScalar( scalar ) { this.min.addScalar( - scalar ); this.max.addScalar( scalar ); return this; } /** * Expands the boundaries of this box to include the given 3D object and * its children, accounting for the object's, and children's, world * transforms. The function may result in a larger box than strictly * necessary (unless the precise parameter is set to true). * * @param {Object3D} object - The 3D object that should expand the bounding box. * @param {boolean} precise - If set to `true`, the method expands the bounding box * as little as necessary at the expense of more computation. * @return {Box3} A reference to this bounding box. */ expandByObject( object, precise = false ) { // Computes the world-axis-aligned bounding box of an object (including its children), // accounting for both the object's, and children's, world transforms object.updateWorldMatrix( false, false ); const geometry = object.geometry; if ( geometry !== undefined ) { const positionAttribute = geometry.getAttribute( 'position' ); // precise AABB computation based on vertex data requires at least a position attribute. // instancing isn't supported so far and uses the normal (conservative) code path. 
if ( precise === true && positionAttribute !== undefined && object.isInstancedMesh !== true ) { for ( let i = 0, l = positionAttribute.count; i < l; i ++ ) { if ( object.isMesh === true ) { object.getVertexPosition( i, _vector$b ); } else { _vector$b.fromBufferAttribute( positionAttribute, i ); } _vector$b.applyMatrix4( object.matrixWorld ); this.expandByPoint( _vector$b ); } } else { if ( object.boundingBox !== undefined ) { // object-level bounding box if ( object.boundingBox === null ) { object.computeBoundingBox(); } _box$4.copy( object.boundingBox ); } else { // geometry-level bounding box if ( geometry.boundingBox === null ) { geometry.computeBoundingBox(); } _box$4.copy( geometry.boundingBox ); } _box$4.applyMatrix4( object.matrixWorld ); this.union( _box$4 ); } } const children = object.children; for ( let i = 0, l = children.length; i < l; i ++ ) { this.expandByObject( children[ i ], precise ); } return this; } /** * Returns `true` if the given point lies within or on the boundaries of this box. * * @param {Vector3} point - The point to test. * @return {boolean} Whether the bounding box contains the given point or not. */ containsPoint( point ) { return point.x >= this.min.x && point.x <= this.max.x && point.y >= this.min.y && point.y <= this.max.y && point.z >= this.min.z && point.z <= this.max.z; } /** * Returns `true` if this bounding box includes the entirety of the given bounding box. * If this box and the given one are identical, this function also returns `true`. * * @param {Box3} box - The bounding box to test. * @return {boolean} Whether the bounding box contains the given bounding box or not. */ containsBox( box ) { return this.min.x <= box.min.x && box.max.x <= this.max.x && this.min.y <= box.min.y && box.max.y <= this.max.y && this.min.z <= box.min.z && box.max.z <= this.max.z; } /** * Returns a point as a proportion of this box's width, height and depth. * * @param {Vector3} point - A point in 3D space. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} A point as a proportion of this box's width, height and depth. */ getParameter( point, target ) { // This can potentially have a divide by zero if the box // has a size dimension of 0. return target.set( ( point.x - this.min.x ) / ( this.max.x - this.min.x ), ( point.y - this.min.y ) / ( this.max.y - this.min.y ), ( point.z - this.min.z ) / ( this.max.z - this.min.z ) ); } /** * Returns `true` if the given bounding box intersects with this bounding box. * * @param {Box3} box - The bounding box to test. * @return {boolean} Whether the given bounding box intersects with this bounding box. */ intersectsBox( box ) { // using 6 splitting planes to rule out intersections. return box.max.x >= this.min.x && box.min.x <= this.max.x && box.max.y >= this.min.y && box.min.y <= this.max.y && box.max.z >= this.min.z && box.min.z <= this.max.z; } /** * Returns `true` if the given bounding sphere intersects with this bounding box. * * @param {Sphere} sphere - The bounding sphere to test. * @return {boolean} Whether the given bounding sphere intersects with this bounding box. */ intersectsSphere( sphere ) { // Find the point on the AABB closest to the sphere center. this.clampPoint( sphere.center, _vector$b ); // If that point is inside the sphere, the AABB and sphere intersect. return _vector$b.distanceToSquared( sphere.center ) <= ( sphere.radius * sphere.radius ); } /** * Returns `true` if the given plane intersects with this bounding box. 
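*
* A brief sketch using this library's {@link Plane} class (the plane `x = 0.5` cuts
* through a unit box around the origin, so the test succeeds):
* ```js
* const box = new THREE.Box3( new THREE.Vector3( -1, -1, -1 ), new THREE.Vector3( 1, 1, 1 ) );
* const plane = new THREE.Plane( new THREE.Vector3( 1, 0, 0 ), -0.5 ); // x - 0.5 = 0
* console.log( box.intersectsPlane( plane ) ); // true
* ```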
* * @param {Plane} plane - The plane to test. * @return {boolean} Whether the given plane intersects with this bounding box. */ intersectsPlane( plane ) { // We compute the minimum and maximum dot product values. If those values // are on the same side (back or front) of the plane, then there is no intersection. let min, max; if ( plane.normal.x > 0 ) { min = plane.normal.x * this.min.x; max = plane.normal.x * this.max.x; } else { min = plane.normal.x * this.max.x; max = plane.normal.x * this.min.x; } if ( plane.normal.y > 0 ) { min += plane.normal.y * this.min.y; max += plane.normal.y * this.max.y; } else { min += plane.normal.y * this.max.y; max += plane.normal.y * this.min.y; } if ( plane.normal.z > 0 ) { min += plane.normal.z * this.min.z; max += plane.normal.z * this.max.z; } else { min += plane.normal.z * this.max.z; max += plane.normal.z * this.min.z; } return ( min <= - plane.constant && max >= - plane.constant ); } /** * Returns `true` if the given triangle intersects with this bounding box. * * @param {Triangle} triangle - The triangle to test. * @return {boolean} Whether the given triangle intersects with this bounding box. */ intersectsTriangle( triangle ) { if ( this.isEmpty() ) { return false; } // compute box center and extents this.getCenter( _center ); _extents.subVectors( this.max, _center ); // translate triangle to aabb origin _v0$3.subVectors( triangle.a, _center ); _v1$7.subVectors( triangle.b, _center ); _v2$4.subVectors( triangle.c, _center ); // compute edge vectors for triangle _f0.subVectors( _v1$7, _v0$3 ); _f1.subVectors( _v2$4, _v1$7 ); _f2.subVectors( _v0$3, _v2$4 ); // test against axes that are given by cross product combinations of the edges of the triangle and the edges of the aabb // make an axis testing of each of the 3 sides of the aabb against each of the 3 sides of the triangle = 9 axis of separation // axis_ij = u_i x f_j (u0, u1, u2 = face normals of aabb = x,y,z axes vectors since aabb is axis aligned) let axes = [ 0, - _f0.z, _f0.y, 0, - _f1.z, _f1.y, 0, - _f2.z, _f2.y, _f0.z, 0, - _f0.x, _f1.z, 0, - _f1.x, _f2.z, 0, - _f2.x, - _f0.y, _f0.x, 0, - _f1.y, _f1.x, 0, - _f2.y, _f2.x, 0 ]; if ( ! satForAxes( axes, _v0$3, _v1$7, _v2$4, _extents ) ) { return false; } // test 3 face normals from the aabb axes = [ 1, 0, 0, 0, 1, 0, 0, 0, 1 ]; if ( ! satForAxes( axes, _v0$3, _v1$7, _v2$4, _extents ) ) { return false; } // finally testing the face normal of the triangle // use already existing triangle edge vectors here _triangleNormal.crossVectors( _f0, _f1 ); axes = [ _triangleNormal.x, _triangleNormal.y, _triangleNormal.z ]; return satForAxes( axes, _v0$3, _v1$7, _v2$4, _extents ); } /** * Clamps the given point within the bounds of this box. * * @param {Vector3} point - The point to clamp. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The clamped point. */ clampPoint( point, target ) { return target.copy( point ).clamp( this.min, this.max ); } /** * Returns the euclidean distance from any edge of this box to the specified point. If * the given point lies inside of this box, the distance will be `0`. * * @param {Vector3} point - The point to compute the distance to. * @return {number} The euclidean distance. */ distanceToPoint( point ) { return this.clampPoint( point, _vector$b ).distanceTo( point ); } /** * Returns a bounding sphere that encloses this bounding box. * * @param {Sphere} target - The target sphere that is used to store the method's result. 
* @return {Sphere} The bounding sphere that encloses this bounding box. */ getBoundingSphere( target ) { if ( this.isEmpty() ) { target.makeEmpty(); } else { this.getCenter( target.center ); target.radius = this.getSize( _vector$b ).length() * 0.5; } return target; } /** * Computes the intersection of this bounding box and the given one, setting the upper * bound of this box to the lesser of the two boxes' upper bounds and the * lower bound of this box to the greater of the two boxes' lower bounds. If * there's no overlap, makes this box empty. * * @param {Box3} box - The bounding box to intersect with. * @return {Box3} A reference to this bounding box. */ intersect( box ) { this.min.max( box.min ); this.max.min( box.max ); // ensure that if there is no overlap, the result is fully empty, not slightly empty with finite (non-Infinity) values that would cause subsequent intersects to erroneously return valid results. if ( this.isEmpty() ) this.makeEmpty(); return this; } /** * Computes the union of this box and the given one, setting the upper * bound of this box to the greater of the two boxes' upper bounds and the * lower bound of this box to the lesser of the two boxes' lower bounds. * * @param {Box3} box - The bounding box that will be unioned with this instance. * @return {Box3} A reference to this bounding box. */ union( box ) { this.min.min( box.min ); this.max.max( box.max ); return this; } /** * Transforms this bounding box by the given 4x4 transformation matrix. * * @param {Matrix4} matrix - The transformation matrix. * @return {Box3} A reference to this bounding box. */ applyMatrix4( matrix ) { // transform of empty box is an empty box. if ( this.isEmpty() ) return this; // NOTE: I am using a binary pattern to specify all 2^3 combinations below _points[ 0 ].set( this.min.x, this.min.y, this.min.z ).applyMatrix4( matrix ); // 000 _points[ 1 ].set( this.min.x, this.min.y, this.max.z ).applyMatrix4( matrix ); // 001 _points[ 2 ].set( this.min.x, this.max.y, this.min.z ).applyMatrix4( matrix ); // 010 _points[ 3 ].set( this.min.x, this.max.y, this.max.z ).applyMatrix4( matrix ); // 011 _points[ 4 ].set( this.max.x, this.min.y, this.min.z ).applyMatrix4( matrix ); // 100 _points[ 5 ].set( this.max.x, this.min.y, this.max.z ).applyMatrix4( matrix ); // 101 _points[ 6 ].set( this.max.x, this.max.y, this.min.z ).applyMatrix4( matrix ); // 110 _points[ 7 ].set( this.max.x, this.max.y, this.max.z ).applyMatrix4( matrix ); // 111 this.setFromPoints( _points ); return this; } /** * Adds the given offset to both the upper and lower bounds of this bounding box, * effectively moving it in 3D space. * * @param {Vector3} offset - The offset that should be used to translate the bounding box. * @return {Box3} A reference to this bounding box. */ translate( offset ) { this.min.add( offset ); this.max.add( offset ); return this; } /** * Returns `true` if this bounding box is equal with the given one. * * @param {Box3} box - The box to test for equality. * @return {boolean} Whether this bounding box is equal with the given one.
*/ equals( box ) { return box.min.equals( this.min ) && box.max.equals( this.max ); } } const _points = [ /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3(), /*@__PURE__*/ new Vector3() ]; const _vector$b = /*@__PURE__*/ new Vector3(); const _box$4 = /*@__PURE__*/ new Box3(); // triangle centered vertices const _v0$3 = /*@__PURE__*/ new Vector3(); const _v1$7 = /*@__PURE__*/ new Vector3(); const _v2$4 = /*@__PURE__*/ new Vector3(); // triangle edge vectors const _f0 = /*@__PURE__*/ new Vector3(); const _f1 = /*@__PURE__*/ new Vector3(); const _f2 = /*@__PURE__*/ new Vector3(); const _center = /*@__PURE__*/ new Vector3(); const _extents = /*@__PURE__*/ new Vector3(); const _triangleNormal = /*@__PURE__*/ new Vector3(); const _testAxis = /*@__PURE__*/ new Vector3(); function satForAxes( axes, v0, v1, v2, extents ) { for ( let i = 0, j = axes.length - 3; i <= j; i += 3 ) { _testAxis.fromArray( axes, i ); // project the aabb onto the separating axis const r = extents.x * Math.abs( _testAxis.x ) + extents.y * Math.abs( _testAxis.y ) + extents.z * Math.abs( _testAxis.z ); // project all 3 vertices of the triangle onto the separating axis const p0 = v0.dot( _testAxis ); const p1 = v1.dot( _testAxis ); const p2 = v2.dot( _testAxis ); // actual test, basically see if either of the most extreme of the triangle points intersects r if ( Math.max( - Math.max( p0, p1, p2 ), Math.min( p0, p1, p2 ) ) > r ) { // points of the projected triangle are outside the projected half-length of the aabb // the axis is separating and we can exit return false; } } return true; } const _box$3 = /*@__PURE__*/ new Box3(); const _v1$6 = /*@__PURE__*/ new Vector3(); const _v2$3 = /*@__PURE__*/ new Vector3(); /** * An analytical 3D sphere defined by a center and radius. This class is mainly * used as a Bounding Sphere for 3D objects. */ class Sphere { /** * Constructs a new sphere. * * @param {Vector3} [center=(0,0,0)] - The center of the sphere * @param {number} [radius=-1] - The radius of the sphere. */ constructor( center = new Vector3(), radius = -1 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSphere = true; /** * The center of the sphere * * @type {Vector3} */ this.center = center; /** * The radius of the sphere. * * @type {number} */ this.radius = radius; } /** * Sets the sphere's components by copying the given values. * * @param {Vector3} center - The center. * @param {number} radius - The radius. * @return {Sphere} A reference to this sphere. */ set( center, radius ) { this.center.copy( center ); this.radius = radius; return this; } /** * Computes the minimum bounding sphere for list of points. * If the optional center point is given, it is used as the sphere's * center. Otherwise, the center of the axis-aligned bounding box * encompassing the points is calculated. * * @param {Array} points - A list of points in 3D space. * @param {Vector3} [optionalCenter] - The center of the sphere. * @return {Sphere} A reference to this sphere. 
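*
* A minimal sketch (the points are arbitrary sample data):
* ```js
* const points = [
*     new THREE.Vector3( 1, 0, 0 ),
*     new THREE.Vector3( -1, 0, 0 ),
*     new THREE.Vector3( 0, 2, 0 )
* ];
* const sphere = new THREE.Sphere().setFromPoints( points );
* // sphere.center is the center of the points' axis-aligned bounding box,
* // sphere.radius is the largest distance from that center to any point
* ```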
*/ setFromPoints( points, optionalCenter ) { const center = this.center; if ( optionalCenter !== undefined ) { center.copy( optionalCenter ); } else { _box$3.setFromPoints( points ).getCenter( center ); } let maxRadiusSq = 0; for ( let i = 0, il = points.length; i < il; i ++ ) { maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( points[ i ] ) ); } this.radius = Math.sqrt( maxRadiusSq ); return this; } /** * Copies the values of the given sphere to this instance. * * @param {Sphere} sphere - The sphere to copy. * @return {Sphere} A reference to this sphere. */ copy( sphere ) { this.center.copy( sphere.center ); this.radius = sphere.radius; return this; } /** * Returns `true` if the sphere is empty (the radius set to a negative number). * * Spheres with a radius of `0` contain only their center point and are not * considered to be empty. * * @return {boolean} Whether this sphere is empty or not. */ isEmpty() { return ( this.radius < 0 ); } /** * Makes this sphere empty, which means it encloses a zero space in 3D. * * @return {Sphere} A reference to this sphere. */ makeEmpty() { this.center.set( 0, 0, 0 ); this.radius = -1; return this; } /** * Returns `true` if this sphere contains the given point inclusive of * the surface of the sphere. * * @param {Vector3} point - The point to check. * @return {boolean} Whether this sphere contains the given point or not. */ containsPoint( point ) { return ( point.distanceToSquared( this.center ) <= ( this.radius * this.radius ) ); } /** * Returns the closest distance from the boundary of the sphere to the * given point. If the sphere contains the point, the distance will * be negative. * * @param {Vector3} point - The point to compute the distance to. * @return {number} The distance to the point. */ distanceToPoint( point ) { return ( point.distanceTo( this.center ) - this.radius ); } /** * Returns `true` if this sphere intersects with the given one. * * @param {Sphere} sphere - The sphere to test. * @return {boolean} Whether this sphere intersects with the given one or not. */ intersectsSphere( sphere ) { const radiusSum = this.radius + sphere.radius; return sphere.center.distanceToSquared( this.center ) <= ( radiusSum * radiusSum ); } /** * Returns `true` if this sphere intersects with the given box. * * @param {Box3} box - The box to test. * @return {boolean} Whether this sphere intersects with the given box or not. */ intersectsBox( box ) { return box.intersectsSphere( this ); } /** * Returns `true` if this sphere intersects with the given plane. * * @param {Plane} plane - The plane to test. * @return {boolean} Whether this sphere intersects with the given plane or not. */ intersectsPlane( plane ) { return Math.abs( plane.distanceToPoint( this.center ) ) <= this.radius; } /** * Clamps a point within the sphere. If the point is outside the sphere, it * will clamp it to the closest point on the edge of the sphere. Points * already inside the sphere will not be affected. * * @param {Vector3} point - The point to clamp. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The clamped point. */ clampPoint( point, target ) { const deltaLengthSq = this.center.distanceToSquared( point ); target.copy( point ); if ( deltaLengthSq > ( this.radius * this.radius ) ) { target.sub( this.center ).normalize(); target.multiplyScalar( this.radius ).add( this.center ); } return target; } /** * Returns a bounding box that encloses this sphere.
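*
* Like the other getters in this file, the result is written into a caller-provided
* target object to avoid allocations. A short sketch:
* ```js
* const sphere = new THREE.Sphere( new THREE.Vector3( 0, 0, 0 ), 2 );
* const box = sphere.getBoundingBox( new THREE.Box3() );
* // box.min = ( -2, -2, -2 ), box.max = ( 2, 2, 2 )
* ```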
* * @param {Box3} target - The target box that is used to store the method's result. * @return {Box3} The bounding box that encloses this sphere. */ getBoundingBox( target ) { if ( this.isEmpty() ) { // Empty sphere produces empty bounding box target.makeEmpty(); return target; } target.set( this.center, this.center ); target.expandByScalar( this.radius ); return target; } /** * Transforms this sphere with the given 4x4 transformation matrix. * * @param {Matrix4} matrix - The transformation matrix. * @return {Sphere} A reference to this sphere. */ applyMatrix4( matrix ) { this.center.applyMatrix4( matrix ); this.radius = this.radius * matrix.getMaxScaleOnAxis(); return this; } /** * Translates the sphere's center by the given offset. * * @param {Vector3} offset - The offset. * @return {Sphere} A reference to this sphere. */ translate( offset ) { this.center.add( offset ); return this; } /** * Expands the boundaries of this sphere to include the given point. * * @param {Vector3} point - The point to include. * @return {Sphere} A reference to this sphere. */ expandByPoint( point ) { if ( this.isEmpty() ) { this.center.copy( point ); this.radius = 0; return this; } _v1$6.subVectors( point, this.center ); const lengthSq = _v1$6.lengthSq(); if ( lengthSq > ( this.radius * this.radius ) ) { // calculate the minimal sphere const length = Math.sqrt( lengthSq ); const delta = ( length - this.radius ) * 0.5; this.center.addScaledVector( _v1$6, delta / length ); this.radius += delta; } return this; } /** * Expands this sphere to enclose both the original sphere and the given sphere. * * @param {Sphere} sphere - The sphere to include. * @return {Sphere} A reference to this sphere. */ union( sphere ) { if ( sphere.isEmpty() ) { return this; } if ( this.isEmpty() ) { this.copy( sphere ); return this; } if ( this.center.equals( sphere.center ) === true ) { this.radius = Math.max( this.radius, sphere.radius ); } else { _v2$3.subVectors( sphere.center, this.center ).setLength( sphere.radius ); this.expandByPoint( _v1$6.copy( sphere.center ).add( _v2$3 ) ); this.expandByPoint( _v1$6.copy( sphere.center ).sub( _v2$3 ) ); } return this; } /** * Returns `true` if this sphere is equal with the given one. * * @param {Sphere} sphere - The sphere to test for equality. * @return {boolean} Whether this bounding sphere is equal with the given one. */ equals( sphere ) { return sphere.center.equals( this.center ) && ( sphere.radius === this.radius ); } /** * Returns a new sphere with copied values from this instance. * * @return {Sphere} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } const _vector$a = /*@__PURE__*/ new Vector3(); const _segCenter = /*@__PURE__*/ new Vector3(); const _segDir = /*@__PURE__*/ new Vector3(); const _diff = /*@__PURE__*/ new Vector3(); const _edge1 = /*@__PURE__*/ new Vector3(); const _edge2 = /*@__PURE__*/ new Vector3(); const _normal$1 = /*@__PURE__*/ new Vector3(); /** * A ray that emits from an origin in a certain direction. The class is used by * {@link Raycaster} to assist with raycasting. Raycasting is used for * mouse picking (working out what objects in the 3D space the mouse is over) * amongst other things. */ class Ray { /** * Constructs a new ray. * * @param {Vector3} [origin=(0,0,0)] - The origin of the ray. * @param {Vector3} [direction=(0,0,-1)] - The (normalized) direction of the ray. */ constructor( origin = new Vector3(), direction = new Vector3( 0, 0, -1 ) ) { /** * The origin of the ray. 
* * @type {Vector3} */ this.origin = origin; /** * The (normalized) direction of the ray. * * @type {Vector3} */ this.direction = direction; } /** * Sets the ray's components by copying the given values. * * @param {Vector3} origin - The origin. * @param {Vector3} direction - The direction. * @return {Ray} A reference to this ray. */ set( origin, direction ) { this.origin.copy( origin ); this.direction.copy( direction ); return this; } /** * Copies the values of the given ray to this instance. * * @param {Ray} ray - The ray to copy. * @return {Ray} A reference to this ray. */ copy( ray ) { this.origin.copy( ray.origin ); this.direction.copy( ray.direction ); return this; } /** * Returns a vector that is located at a given distance along this ray. * * @param {number} t - The distance along the ray to retrieve a position for. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} A position on the ray. */ at( t, target ) { return target.copy( this.origin ).addScaledVector( this.direction, t ); } /** * Adjusts the direction of the ray to point at the given vector in world space. * * @param {Vector3} v - The target position. * @return {Ray} A reference to this ray. */ lookAt( v ) { this.direction.copy( v ).sub( this.origin ).normalize(); return this; } /** * Shifts the origin of this ray along its direction by the given distance. * * @param {number} t - The distance along the ray to interpolate. * @return {Ray} A reference to this ray. */ recast( t ) { this.origin.copy( this.at( t, _vector$a ) ); return this; } /** * Returns the point along this ray that is closest to the given point. * * @param {Vector3} point - A point in 3D space to get the closest location on the ray for. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The closest point on this ray. */ closestPointToPoint( point, target ) { target.subVectors( point, this.origin ); const directionDistance = target.dot( this.direction ); if ( directionDistance < 0 ) { return target.copy( this.origin ); } return target.copy( this.origin ).addScaledVector( this.direction, directionDistance ); } /** * Returns the distance of the closest approach between this ray and the given point. * * @param {Vector3} point - A point in 3D space to compute the distance to. * @return {number} The distance. */ distanceToPoint( point ) { return Math.sqrt( this.distanceSqToPoint( point ) ); } /** * Returns the squared distance of the closest approach between this ray and the given point. * * @param {Vector3} point - A point in 3D space to compute the distance to. * @return {number} The squared distance. */ distanceSqToPoint( point ) { const directionDistance = _vector$a.subVectors( point, this.origin ).dot( this.direction ); // point behind the ray if ( directionDistance < 0 ) { return this.origin.distanceToSquared( point ); } _vector$a.copy( this.origin ).addScaledVector( this.direction, directionDistance ); return _vector$a.distanceToSquared( point ); } /** * Returns the squared distance between this ray and the given line segment. * * @param {Vector3} v0 - The start point of the line segment. * @param {Vector3} v1 - The end point of the line segment. * @param {Vector3} [optionalPointOnRay] - When provided, it receives the point on this ray that is closest to the segment. * @param {Vector3} [optionalPointOnSegment] - When provided, it receives the point on the line segment that is closest to this ray. * @return {number} The squared distance.
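*
* A usage sketch with illustrative values (the optional targets receive the closest
* points on the ray and on the segment, respectively):
* ```js
* const ray = new THREE.Ray( new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 0, 0, -1 ) );
* const onRay = new THREE.Vector3(), onSegment = new THREE.Vector3();
* const distSq = ray.distanceSqToSegment(
*     new THREE.Vector3( -1, 1, -5 ), new THREE.Vector3( 1, 1, -5 ),
*     onRay, onSegment
* );
* // distSq === 1, onRay = ( 0, 0, -5 ), onSegment = ( 0, 1, -5 )
* ```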
*/ distanceSqToSegment( v0, v1, optionalPointOnRay, optionalPointOnSegment ) { // from https://github.com/pmjoniak/GeometricTools/blob/master/GTEngine/Include/Mathematics/GteDistRaySegment.h // It returns the min distance between the ray and the segment // defined by v0 and v1 // It can also set two optional targets : // - The closest point on the ray // - The closest point on the segment _segCenter.copy( v0 ).add( v1 ).multiplyScalar( 0.5 ); _segDir.copy( v1 ).sub( v0 ).normalize(); _diff.copy( this.origin ).sub( _segCenter ); const segExtent = v0.distanceTo( v1 ) * 0.5; const a01 = - this.direction.dot( _segDir ); const b0 = _diff.dot( this.direction ); const b1 = - _diff.dot( _segDir ); const c = _diff.lengthSq(); const det = Math.abs( 1 - a01 * a01 ); let s0, s1, sqrDist, extDet; if ( det > 0 ) { // The ray and segment are not parallel. s0 = a01 * b1 - b0; s1 = a01 * b0 - b1; extDet = segExtent * det; if ( s0 >= 0 ) { if ( s1 >= - extDet ) { if ( s1 <= extDet ) { // region 0 // Minimum at interior points of ray and segment. const invDet = 1 / det; s0 *= invDet; s1 *= invDet; sqrDist = s0 * ( s0 + a01 * s1 + 2 * b0 ) + s1 * ( a01 * s0 + s1 + 2 * b1 ) + c; } else { // region 1 s1 = segExtent; s0 = Math.max( 0, - ( a01 * s1 + b0 ) ); sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c; } } else { // region 5 s1 = - segExtent; s0 = Math.max( 0, - ( a01 * s1 + b0 ) ); sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c; } } else { if ( s1 <= - extDet ) { // region 4 s0 = Math.max( 0, - ( - a01 * segExtent + b0 ) ); s1 = ( s0 > 0 ) ? - segExtent : Math.min( Math.max( - segExtent, - b1 ), segExtent ); sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c; } else if ( s1 <= extDet ) { // region 3 s0 = 0; s1 = Math.min( Math.max( - segExtent, - b1 ), segExtent ); sqrDist = s1 * ( s1 + 2 * b1 ) + c; } else { // region 2 s0 = Math.max( 0, - ( a01 * segExtent + b0 ) ); s1 = ( s0 > 0 ) ? segExtent : Math.min( Math.max( - segExtent, - b1 ), segExtent ); sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c; } } } else { // Ray and segment are parallel. s1 = ( a01 > 0 ) ? - segExtent : segExtent; s0 = Math.max( 0, - ( a01 * s1 + b0 ) ); sqrDist = - s0 * s0 + s1 * ( s1 + 2 * b1 ) + c; } if ( optionalPointOnRay ) { optionalPointOnRay.copy( this.origin ).addScaledVector( this.direction, s0 ); } if ( optionalPointOnSegment ) { optionalPointOnSegment.copy( _segCenter ).addScaledVector( _segDir, s1 ); } return sqrDist; } /** * Intersects this ray with the given sphere, returning the intersection * point or `null` if there is no intersection. * * @param {Sphere} sphere - The sphere to intersect. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The intersection point. */ intersectSphere( sphere, target ) { _vector$a.subVectors( sphere.center, this.origin ); const tca = _vector$a.dot( this.direction ); const d2 = _vector$a.dot( _vector$a ) - tca * tca; const radius2 = sphere.radius * sphere.radius; if ( d2 > radius2 ) return null; const thc = Math.sqrt( radius2 - d2 ); // t0 = first intersect point - entrance on front of sphere const t0 = tca - thc; // t1 = second intersect point - exit point on back of sphere const t1 = tca + thc; // test to see if t1 is behind the ray - if so, return null if ( t1 < 0 ) return null; // test to see if t0 is behind the ray: // if it is, the ray is inside the sphere, so return the second exit point scaled by t1, // in order to always return an intersect point that is in front of the ray. 
if ( t0 < 0 ) return this.at( t1, target ); // else t0 is in front of the ray, so return the first collision point scaled by t0 return this.at( t0, target ); } /** * Returns `true` if this ray intersects with the given sphere. * * @param {Sphere} sphere - The sphere to intersect. * @return {boolean} Whether this ray intersects with the given sphere or not. */ intersectsSphere( sphere ) { return this.distanceSqToPoint( sphere.center ) <= ( sphere.radius * sphere.radius ); } /** * Computes the distance from the ray's origin to the given plane. Returns `null` if the ray * does not intersect with the plane. * * @param {Plane} plane - The plane to compute the distance to. * @return {?number} The distance from the ray's origin to the plane, or `null` if the ray does not intersect the plane. */ distanceToPlane( plane ) { const denominator = plane.normal.dot( this.direction ); if ( denominator === 0 ) { // line is coplanar, return origin if ( plane.distanceToPoint( this.origin ) === 0 ) { return 0; } // Null is preferable to undefined since undefined means.... it is undefined return null; } const t = - ( this.origin.dot( plane.normal ) + plane.constant ) / denominator; // return null if the ray never intersects the plane return t >= 0 ? t : null; } /** * Intersects this ray with the given plane, returning the intersection * point or `null` if there is no intersection. * * @param {Plane} plane - The plane to intersect. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The intersection point. */ intersectPlane( plane, target ) { const t = this.distanceToPlane( plane ); if ( t === null ) { return null; } return this.at( t, target ); } /** * Returns `true` if this ray intersects with the given plane. * * @param {Plane} plane - The plane to intersect. * @return {boolean} Whether this ray intersects with the given plane or not. */ intersectsPlane( plane ) { // check if the ray lies on the plane first const distToPoint = plane.distanceToPoint( this.origin ); if ( distToPoint === 0 ) { return true; } const denominator = plane.normal.dot( this.direction ); if ( denominator * distToPoint < 0 ) { return true; } // ray origin is behind the plane (and is pointing behind it) return false; } /** * Intersects this ray with the given bounding box, returning the intersection * point or `null` if there is no intersection. * * @param {Box3} box - The box to intersect. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The intersection point.
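*
* A short sketch with illustrative values:
* ```js
* const ray = new THREE.Ray( new THREE.Vector3( 0, 0, 5 ), new THREE.Vector3( 0, 0, -1 ) );
* const box = new THREE.Box3( new THREE.Vector3( -1, -1, -1 ), new THREE.Vector3( 1, 1, 1 ) );
* const hit = ray.intersectBox( box, new THREE.Vector3() );
* // hit = ( 0, 0, 1 ), the point where the ray first enters the box
* ```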
*/ intersectBox( box, target ) { let tmin, tmax, tymin, tymax, tzmin, tzmax; const invdirx = 1 / this.direction.x, invdiry = 1 / this.direction.y, invdirz = 1 / this.direction.z; const origin = this.origin; if ( invdirx >= 0 ) { tmin = ( box.min.x - origin.x ) * invdirx; tmax = ( box.max.x - origin.x ) * invdirx; } else { tmin = ( box.max.x - origin.x ) * invdirx; tmax = ( box.min.x - origin.x ) * invdirx; } if ( invdiry >= 0 ) { tymin = ( box.min.y - origin.y ) * invdiry; tymax = ( box.max.y - origin.y ) * invdiry; } else { tymin = ( box.max.y - origin.y ) * invdiry; tymax = ( box.min.y - origin.y ) * invdiry; } if ( ( tmin > tymax ) || ( tymin > tmax ) ) return null; if ( tymin > tmin || isNaN( tmin ) ) tmin = tymin; if ( tymax < tmax || isNaN( tmax ) ) tmax = tymax; if ( invdirz >= 0 ) { tzmin = ( box.min.z - origin.z ) * invdirz; tzmax = ( box.max.z - origin.z ) * invdirz; } else { tzmin = ( box.max.z - origin.z ) * invdirz; tzmax = ( box.min.z - origin.z ) * invdirz; } if ( ( tmin > tzmax ) || ( tzmin > tmax ) ) return null; if ( tzmin > tmin || tmin !== tmin ) tmin = tzmin; if ( tzmax < tmax || tmax !== tmax ) tmax = tzmax; //return point closest to the ray (positive side) if ( tmax < 0 ) return null; return this.at( tmin >= 0 ? tmin : tmax, target ); } /** * Returns `true` if this ray intersects with the given box. * * @param {Box3} box - The box to intersect. * @return {boolean} Whether this ray intersects with the given box or not. */ intersectsBox( box ) { return this.intersectBox( box, _vector$a ) !== null; } /** * Intersects this ray with the given triangle, returning the intersection * point or `null` if there is no intersection. * * @param {Vector3} a - The first vertex of the triangle. * @param {Vector3} b - The second vertex of the triangle. * @param {Vector3} c - The third vertex of the triangle. * @param {boolean} backfaceCulling - Whether to use backface culling or not. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The intersection point. */ intersectTriangle( a, b, c, backfaceCulling, target ) { // Compute the offset origin, edges, and normal. // from https://github.com/pmjoniak/GeometricTools/blob/master/GTEngine/Include/Mathematics/GteIntrRay3Triangle3.h _edge1.subVectors( b, a ); _edge2.subVectors( c, a ); _normal$1.crossVectors( _edge1, _edge2 ); // Solve Q + t*D = b1*E1 + b2*E2 (Q = kDiff, D = ray direction, // E1 = kEdge1, E2 = kEdge2, N = Cross(E1,E2)) by // |Dot(D,N)|*b1 = sign(Dot(D,N))*Dot(D,Cross(Q,E2)) // |Dot(D,N)|*b2 = sign(Dot(D,N))*Dot(D,Cross(E1,Q)) // |Dot(D,N)|*t = -sign(Dot(D,N))*Dot(Q,N) let DdN = this.direction.dot( _normal$1 ); let sign; if ( DdN > 0 ) { if ( backfaceCulling ) return null; sign = 1; } else if ( DdN < 0 ) { sign = -1; DdN = - DdN; } else { return null; } _diff.subVectors( this.origin, a ); const DdQxE2 = sign * this.direction.dot( _edge2.crossVectors( _diff, _edge2 ) ); // b1 < 0, no intersection if ( DdQxE2 < 0 ) { return null; } const DdE1xQ = sign * this.direction.dot( _edge1.cross( _diff ) ); // b2 < 0, no intersection if ( DdE1xQ < 0 ) { return null; } // b1+b2 > 1, no intersection if ( DdQxE2 + DdE1xQ > DdN ) { return null; } // Line intersects triangle, check if ray does. const QdN = - sign * _diff.dot( _normal$1 ); // t < 0, no intersection if ( QdN < 0 ) { return null; } // Ray intersects triangle. return this.at( QdN / DdN, target ); } /** * Transforms this ray with the given 4x4 transformation matrix. * * @param {Matrix4} matrix4 - The transformation matrix. 
* @return {Ray} A reference to this ray. */ applyMatrix4( matrix4 ) { this.origin.applyMatrix4( matrix4 ); this.direction.transformDirection( matrix4 ); return this; } /** * Returns `true` if this ray is equal with the given one. * * @param {Ray} ray - The ray to test for equality. * @return {boolean} Whether this ray is equal with the given one. */ equals( ray ) { return ray.origin.equals( this.origin ) && ray.direction.equals( this.direction ); } /** * Returns a new ray with copied values from this instance. * * @return {Ray} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } /** * Represents a 4x4 matrix. * * The most common use of a 4x4 matrix in 3D computer graphics is as a transformation matrix. * For an introduction to transformation matrices as used in WebGL, check out [this tutorial]{@link https://www.opengl-tutorial.org/beginners-tutorials/tutorial-3-matrices} * * This allows a 3D vector representing a point in 3D space to undergo * transformations such as translation, rotation, shear, scale, reflection, * orthogonal or perspective projection and so on, by being multiplied by the * matrix. This is known as `applying` the matrix to the vector. * * A Note on Row-Major and Column-Major Ordering: * * The constructor and {@link Matrix4#set} method take arguments in * [row-major]{@link https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order} * order, while internally they are stored in the {@link Matrix4#elements} array in column-major order. * This means that calling: * ```js * const m = new THREE.Matrix4(); * m.set( 11, 12, 13, 14, * 21, 22, 23, 24, * 31, 32, 33, 34, * 41, 42, 43, 44 ); * ``` * will result in the elements array containing: * ```js * m.elements = [ 11, 21, 31, 41, * 12, 22, 32, 42, * 13, 23, 33, 43, * 14, 24, 34, 44 ]; * ``` * and internally all calculations are performed using column-major ordering. * However, as the actual ordering makes no difference mathematically and * most people are used to thinking about matrices in row-major order, the * three.js documentation shows matrices in row-major order. Just bear in * mind that if you are reading the source code, you'll have to take the * transpose of any matrices outlined here to make sense of the calculations. */ class Matrix4 { /** * Constructs a new 4x4 matrix. The arguments are supposed to be * in row-major order. If no arguments are provided, the constructor * initializes the matrix as an identity matrix. * * @param {number} [n11] - 1-1 matrix element. * @param {number} [n12] - 1-2 matrix element. * @param {number} [n13] - 1-3 matrix element. * @param {number} [n14] - 1-4 matrix element. * @param {number} [n21] - 2-1 matrix element. * @param {number} [n22] - 2-2 matrix element. * @param {number} [n23] - 2-3 matrix element. * @param {number} [n24] - 2-4 matrix element. * @param {number} [n31] - 3-1 matrix element. * @param {number} [n32] - 3-2 matrix element. * @param {number} [n33] - 3-3 matrix element. * @param {number} [n34] - 3-4 matrix element. * @param {number} [n41] - 4-1 matrix element. * @param {number} [n42] - 4-2 matrix element. * @param {number} [n43] - 4-3 matrix element. * @param {number} [n44] - 4-4 matrix element. */ constructor( n11, n12, n13, n14, n21, n22, n23, n24, n31, n32, n33, n34, n41, n42, n43, n44 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Matrix4.prototype.isMatrix4 = true; /** * A column-major list of matrix values.
* * @type {Array} */ this.elements = [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ]; if ( n11 !== undefined ) { this.set( n11, n12, n13, n14, n21, n22, n23, n24, n31, n32, n33, n34, n41, n42, n43, n44 ); } } /** * Sets the elements of the matrix.The arguments are supposed to be * in row-major order. * * @param {number} [n11] - 1-1 matrix element. * @param {number} [n12] - 1-2 matrix element. * @param {number} [n13] - 1-3 matrix element. * @param {number} [n14] - 1-4 matrix element. * @param {number} [n21] - 2-1 matrix element. * @param {number} [n22] - 2-2 matrix element. * @param {number} [n23] - 2-3 matrix element. * @param {number} [n24] - 2-4 matrix element. * @param {number} [n31] - 3-1 matrix element. * @param {number} [n32] - 3-2 matrix element. * @param {number} [n33] - 3-3 matrix element. * @param {number} [n34] - 3-4 matrix element. * @param {number} [n41] - 4-1 matrix element. * @param {number} [n42] - 4-2 matrix element. * @param {number} [n43] - 4-3 matrix element. * @param {number} [n44] - 4-4 matrix element. * @return {Matrix4} A reference to this matrix. */ set( n11, n12, n13, n14, n21, n22, n23, n24, n31, n32, n33, n34, n41, n42, n43, n44 ) { const te = this.elements; te[ 0 ] = n11; te[ 4 ] = n12; te[ 8 ] = n13; te[ 12 ] = n14; te[ 1 ] = n21; te[ 5 ] = n22; te[ 9 ] = n23; te[ 13 ] = n24; te[ 2 ] = n31; te[ 6 ] = n32; te[ 10 ] = n33; te[ 14 ] = n34; te[ 3 ] = n41; te[ 7 ] = n42; te[ 11 ] = n43; te[ 15 ] = n44; return this; } /** * Sets this matrix to the 4x4 identity matrix. * * @return {Matrix4} A reference to this matrix. */ identity() { this.set( 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ); return this; } /** * Returns a matrix with copied values from this instance. * * @return {Matrix4} A clone of this instance. */ clone() { return new Matrix4().fromArray( this.elements ); } /** * Copies the values of the given matrix to this instance. * * @param {Matrix4} m - The matrix to copy. * @return {Matrix4} A reference to this matrix. */ copy( m ) { const te = this.elements; const me = m.elements; te[ 0 ] = me[ 0 ]; te[ 1 ] = me[ 1 ]; te[ 2 ] = me[ 2 ]; te[ 3 ] = me[ 3 ]; te[ 4 ] = me[ 4 ]; te[ 5 ] = me[ 5 ]; te[ 6 ] = me[ 6 ]; te[ 7 ] = me[ 7 ]; te[ 8 ] = me[ 8 ]; te[ 9 ] = me[ 9 ]; te[ 10 ] = me[ 10 ]; te[ 11 ] = me[ 11 ]; te[ 12 ] = me[ 12 ]; te[ 13 ] = me[ 13 ]; te[ 14 ] = me[ 14 ]; te[ 15 ] = me[ 15 ]; return this; } /** * Copies the translation component of the given matrix * into this matrix's translation component. * * @param {Matrix4} m - The matrix to copy the translation component. * @return {Matrix4} A reference to this matrix. */ copyPosition( m ) { const te = this.elements, me = m.elements; te[ 12 ] = me[ 12 ]; te[ 13 ] = me[ 13 ]; te[ 14 ] = me[ 14 ]; return this; } /** * Set the upper 3x3 elements of this matrix to the values of given 3x3 matrix. * * @param {Matrix3} m - The 3x3 matrix. * @return {Matrix4} A reference to this matrix. */ setFromMatrix3( m ) { const me = m.elements; this.set( me[ 0 ], me[ 3 ], me[ 6 ], 0, me[ 1 ], me[ 4 ], me[ 7 ], 0, me[ 2 ], me[ 5 ], me[ 8 ], 0, 0, 0, 0, 1 ); return this; } /** * Extracts the basis of this matrix into the three axis vectors provided. * * @param {Vector3} xAxis - The basis's x axis. * @param {Vector3} yAxis - The basis's y axis. * @param {Vector3} zAxis - The basis's z axis. * @return {Matrix4} A reference to this matrix. 
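*
* A usage sketch (illustrative; the `object` instance is an assumption):
* ```js
* const xAxis = new THREE.Vector3();
* const yAxis = new THREE.Vector3();
* const zAxis = new THREE.Vector3();
* object.matrixWorld.extractBasis( xAxis, yAxis, zAxis );
* ```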
*/ extractBasis( xAxis, yAxis, zAxis ) { xAxis.setFromMatrixColumn( this, 0 ); yAxis.setFromMatrixColumn( this, 1 ); zAxis.setFromMatrixColumn( this, 2 ); return this; } /** * Sets the given basis vectors to this matrix. * * @param {Vector3} xAxis - The basis's x axis. * @param {Vector3} yAxis - The basis's y axis. * @param {Vector3} zAxis - The basis's z axis. * @return {Matrix4} A reference to this matrix. */ makeBasis( xAxis, yAxis, zAxis ) { this.set( xAxis.x, yAxis.x, zAxis.x, 0, xAxis.y, yAxis.y, zAxis.y, 0, xAxis.z, yAxis.z, zAxis.z, 0, 0, 0, 0, 1 ); return this; } /** * Extracts the rotation component of the given matrix * into this matrix's rotation component. * * Note: This method does not support reflection matrices. * * @param {Matrix4} m - The matrix. * @return {Matrix4} A reference to this matrix. */ extractRotation( m ) { const te = this.elements; const me = m.elements; const scaleX = 1 / _v1$5.setFromMatrixColumn( m, 0 ).length(); const scaleY = 1 / _v1$5.setFromMatrixColumn( m, 1 ).length(); const scaleZ = 1 / _v1$5.setFromMatrixColumn( m, 2 ).length(); te[ 0 ] = me[ 0 ] * scaleX; te[ 1 ] = me[ 1 ] * scaleX; te[ 2 ] = me[ 2 ] * scaleX; te[ 3 ] = 0; te[ 4 ] = me[ 4 ] * scaleY; te[ 5 ] = me[ 5 ] * scaleY; te[ 6 ] = me[ 6 ] * scaleY; te[ 7 ] = 0; te[ 8 ] = me[ 8 ] * scaleZ; te[ 9 ] = me[ 9 ] * scaleZ; te[ 10 ] = me[ 10 ] * scaleZ; te[ 11 ] = 0; te[ 12 ] = 0; te[ 13 ] = 0; te[ 14 ] = 0; te[ 15 ] = 1; return this; } /** * Sets the rotation component (the upper left 3x3 matrix) of this matrix to * the rotation specified by the given Euler angles. The rest of * the matrix is set to the identity. Depending on the {@link Euler#order}, * there are six possible outcomes. See [this page]{@link https://en.wikipedia.org/wiki/Euler_angles#Rotation_matrix} * for a complete list. * * @param {Euler} euler - The Euler angles. * @return {Matrix4} A reference to this matrix. 
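*
* A minimal sketch (illustrative):
* ```js
* const euler = new THREE.Euler( Math.PI / 2, 0, 0, 'XYZ' );
* const m = new THREE.Matrix4().makeRotationFromEuler( euler );
* ```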
*/ makeRotationFromEuler( euler ) { const te = this.elements; const x = euler.x, y = euler.y, z = euler.z; const a = Math.cos( x ), b = Math.sin( x ); const c = Math.cos( y ), d = Math.sin( y ); const e = Math.cos( z ), f = Math.sin( z ); if ( euler.order === 'XYZ' ) { const ae = a * e, af = a * f, be = b * e, bf = b * f; te[ 0 ] = c * e; te[ 4 ] = - c * f; te[ 8 ] = d; te[ 1 ] = af + be * d; te[ 5 ] = ae - bf * d; te[ 9 ] = - b * c; te[ 2 ] = bf - ae * d; te[ 6 ] = be + af * d; te[ 10 ] = a * c; } else if ( euler.order === 'YXZ' ) { const ce = c * e, cf = c * f, de = d * e, df = d * f; te[ 0 ] = ce + df * b; te[ 4 ] = de * b - cf; te[ 8 ] = a * d; te[ 1 ] = a * f; te[ 5 ] = a * e; te[ 9 ] = - b; te[ 2 ] = cf * b - de; te[ 6 ] = df + ce * b; te[ 10 ] = a * c; } else if ( euler.order === 'ZXY' ) { const ce = c * e, cf = c * f, de = d * e, df = d * f; te[ 0 ] = ce - df * b; te[ 4 ] = - a * f; te[ 8 ] = de + cf * b; te[ 1 ] = cf + de * b; te[ 5 ] = a * e; te[ 9 ] = df - ce * b; te[ 2 ] = - a * d; te[ 6 ] = b; te[ 10 ] = a * c; } else if ( euler.order === 'ZYX' ) { const ae = a * e, af = a * f, be = b * e, bf = b * f; te[ 0 ] = c * e; te[ 4 ] = be * d - af; te[ 8 ] = ae * d + bf; te[ 1 ] = c * f; te[ 5 ] = bf * d + ae; te[ 9 ] = af * d - be; te[ 2 ] = - d; te[ 6 ] = b * c; te[ 10 ] = a * c; } else if ( euler.order === 'YZX' ) { const ac = a * c, ad = a * d, bc = b * c, bd = b * d; te[ 0 ] = c * e; te[ 4 ] = bd - ac * f; te[ 8 ] = bc * f + ad; te[ 1 ] = f; te[ 5 ] = a * e; te[ 9 ] = - b * e; te[ 2 ] = - d * e; te[ 6 ] = ad * f + bc; te[ 10 ] = ac - bd * f; } else if ( euler.order === 'XZY' ) { const ac = a * c, ad = a * d, bc = b * c, bd = b * d; te[ 0 ] = c * e; te[ 4 ] = - f; te[ 8 ] = d * e; te[ 1 ] = ac * f + bd; te[ 5 ] = a * e; te[ 9 ] = ad * f - bc; te[ 2 ] = bc * f - ad; te[ 6 ] = b * e; te[ 10 ] = bd * f + ac; } // bottom row te[ 3 ] = 0; te[ 7 ] = 0; te[ 11 ] = 0; // last column te[ 12 ] = 0; te[ 13 ] = 0; te[ 14 ] = 0; te[ 15 ] = 1; return this; } /** * Sets the rotation component of this matrix to the rotation specified by * the given Quaternion as outlined [here]{@link https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion} * The rest of the matrix is set to the identity. * * @param {Quaternion} q - The Quaternion. * @return {Matrix4} A reference to this matrix. */ makeRotationFromQuaternion( q ) { return this.compose( _zero, q, _one ); } /** * Sets the rotation component of the transformation matrix, looking from `eye` towards * `target`, and oriented by the up-direction. * * @param {Vector3} eye - The eye vector. * @param {Vector3} target - The target vector. * @param {Vector3} up - The up vector. * @return {Matrix4} A reference to this matrix. */ lookAt( eye, target, up ) { const te = this.elements; _z.subVectors( eye, target ); if ( _z.lengthSq() === 0 ) { // eye and target are in the same position _z.z = 1; } _z.normalize(); _x.crossVectors( up, _z ); if ( _x.lengthSq() === 0 ) { // up and z are parallel if ( Math.abs( up.z ) === 1 ) { _z.x += 0.0001; } else { _z.z += 0.0001; } _z.normalize(); _x.crossVectors( up, _z ); } _x.normalize(); _y.crossVectors( _z, _x ); te[ 0 ] = _x.x; te[ 4 ] = _y.x; te[ 8 ] = _z.x; te[ 1 ] = _x.y; te[ 5 ] = _y.y; te[ 9 ] = _z.y; te[ 2 ] = _x.z; te[ 6 ] = _y.z; te[ 10 ] = _z.z; return this; } /** * Post-multiplies this matrix by the given 4x4 matrix. * * @param {Matrix4} m - The matrix to multiply with. * @return {Matrix4} A reference to this matrix. 
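*
* A usage sketch (illustrative). Because the product is `this * m`, the combined
* transform applies `m` to a vector first and the original matrix second:
* ```js
* const translate = new THREE.Matrix4().makeTranslation( 1, 0, 0 );
* const rotate = new THREE.Matrix4().makeRotationZ( Math.PI / 2 );
* translate.multiply( rotate ); // rotate first, then translate
* ```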
*/ multiply( m ) { return this.multiplyMatrices( this, m ); } /** * Pre-multiplies this matrix by the given 4x4 matrix. * * @param {Matrix4} m - The matrix to multiply with. * @return {Matrix4} A reference to this matrix. */ premultiply( m ) { return this.multiplyMatrices( m, this ); } /** * Multiples the given 4x4 matrices and stores the result * in this matrix. * * @param {Matrix4} a - The first matrix. * @param {Matrix4} b - The second matrix. * @return {Matrix4} A reference to this matrix. */ multiplyMatrices( a, b ) { const ae = a.elements; const be = b.elements; const te = this.elements; const a11 = ae[ 0 ], a12 = ae[ 4 ], a13 = ae[ 8 ], a14 = ae[ 12 ]; const a21 = ae[ 1 ], a22 = ae[ 5 ], a23 = ae[ 9 ], a24 = ae[ 13 ]; const a31 = ae[ 2 ], a32 = ae[ 6 ], a33 = ae[ 10 ], a34 = ae[ 14 ]; const a41 = ae[ 3 ], a42 = ae[ 7 ], a43 = ae[ 11 ], a44 = ae[ 15 ]; const b11 = be[ 0 ], b12 = be[ 4 ], b13 = be[ 8 ], b14 = be[ 12 ]; const b21 = be[ 1 ], b22 = be[ 5 ], b23 = be[ 9 ], b24 = be[ 13 ]; const b31 = be[ 2 ], b32 = be[ 6 ], b33 = be[ 10 ], b34 = be[ 14 ]; const b41 = be[ 3 ], b42 = be[ 7 ], b43 = be[ 11 ], b44 = be[ 15 ]; te[ 0 ] = a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41; te[ 4 ] = a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42; te[ 8 ] = a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43; te[ 12 ] = a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44; te[ 1 ] = a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41; te[ 5 ] = a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42; te[ 9 ] = a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43; te[ 13 ] = a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44; te[ 2 ] = a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41; te[ 6 ] = a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42; te[ 10 ] = a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43; te[ 14 ] = a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44; te[ 3 ] = a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41; te[ 7 ] = a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42; te[ 11 ] = a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43; te[ 15 ] = a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44; return this; } /** * Multiplies every component of the matrix by the given scalar. * * @param {number} s - The scalar. * @return {Matrix4} A reference to this matrix. */ multiplyScalar( s ) { const te = this.elements; te[ 0 ] *= s; te[ 4 ] *= s; te[ 8 ] *= s; te[ 12 ] *= s; te[ 1 ] *= s; te[ 5 ] *= s; te[ 9 ] *= s; te[ 13 ] *= s; te[ 2 ] *= s; te[ 6 ] *= s; te[ 10 ] *= s; te[ 14 ] *= s; te[ 3 ] *= s; te[ 7 ] *= s; te[ 11 ] *= s; te[ 15 ] *= s; return this; } /** * Computes and returns the determinant of this matrix. * * Based on the method outlined [here]{@link http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.html}. * * @return {number} The determinant. 
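*
* A quick sanity check (illustrative): for a pure scale matrix the determinant is the
* product of the scale factors, and a negative value indicates a reflection.
* ```js
* const m = new THREE.Matrix4().makeScale( 2, 3, 4 );
* console.log( m.determinant() ); // 24
* ```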
*/ determinant() { const te = this.elements; const n11 = te[ 0 ], n12 = te[ 4 ], n13 = te[ 8 ], n14 = te[ 12 ]; const n21 = te[ 1 ], n22 = te[ 5 ], n23 = te[ 9 ], n24 = te[ 13 ]; const n31 = te[ 2 ], n32 = te[ 6 ], n33 = te[ 10 ], n34 = te[ 14 ]; const n41 = te[ 3 ], n42 = te[ 7 ], n43 = te[ 11 ], n44 = te[ 15 ]; //TODO: make this more efficient return ( n41 * ( + n14 * n23 * n32 - n13 * n24 * n32 - n14 * n22 * n33 + n12 * n24 * n33 + n13 * n22 * n34 - n12 * n23 * n34 ) + n42 * ( + n11 * n23 * n34 - n11 * n24 * n33 + n14 * n21 * n33 - n13 * n21 * n34 + n13 * n24 * n31 - n14 * n23 * n31 ) + n43 * ( + n11 * n24 * n32 - n11 * n22 * n34 - n14 * n21 * n32 + n12 * n21 * n34 + n14 * n22 * n31 - n12 * n24 * n31 ) + n44 * ( - n13 * n22 * n31 - n11 * n23 * n32 + n11 * n22 * n33 + n13 * n21 * n32 - n12 * n21 * n33 + n12 * n23 * n31 ) ); } /** * Transposes this matrix in place. * * @return {Matrix4} A reference to this matrix. */ transpose() { const te = this.elements; let tmp; tmp = te[ 1 ]; te[ 1 ] = te[ 4 ]; te[ 4 ] = tmp; tmp = te[ 2 ]; te[ 2 ] = te[ 8 ]; te[ 8 ] = tmp; tmp = te[ 6 ]; te[ 6 ] = te[ 9 ]; te[ 9 ] = tmp; tmp = te[ 3 ]; te[ 3 ] = te[ 12 ]; te[ 12 ] = tmp; tmp = te[ 7 ]; te[ 7 ] = te[ 13 ]; te[ 13 ] = tmp; tmp = te[ 11 ]; te[ 11 ] = te[ 14 ]; te[ 14 ] = tmp; return this; } /** * Sets the position component for this matrix from the given vector, * without affecting the rest of the matrix. * * @param {number|Vector3} x - The x component of the vector or alternatively the vector object. * @param {number} y - The y component of the vector. * @param {number} z - The z component of the vector. * @return {Matrix4} A reference to this matrix. */ setPosition( x, y, z ) { const te = this.elements; if ( x.isVector3 ) { te[ 12 ] = x.x; te[ 13 ] = x.y; te[ 14 ] = x.z; } else { te[ 12 ] = x; te[ 13 ] = y; te[ 14 ] = z; } return this; } /** * Inverts this matrix, using the [analytic method]{@link https://en.wikipedia.org/wiki/Invertible_matrix#Analytic_solution}. * You can not invert with a determinant of zero. If you attempt this, the method produces * a zero matrix instead. * * @return {Matrix4} A reference to this matrix. 
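*
* A common pattern (illustrative; `object` and `worldPoint` are assumptions): invert a
* world matrix to transform a point from world space into the object's local space.
* ```js
* const worldToLocal = object.matrixWorld.clone().invert();
* const localPoint = worldPoint.clone().applyMatrix4( worldToLocal );
* ```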
*/ invert() { // based on http://www.euclideanspace.com/maths/algebra/matrix/functions/inverse/fourD/index.htm const te = this.elements, n11 = te[ 0 ], n21 = te[ 1 ], n31 = te[ 2 ], n41 = te[ 3 ], n12 = te[ 4 ], n22 = te[ 5 ], n32 = te[ 6 ], n42 = te[ 7 ], n13 = te[ 8 ], n23 = te[ 9 ], n33 = te[ 10 ], n43 = te[ 11 ], n14 = te[ 12 ], n24 = te[ 13 ], n34 = te[ 14 ], n44 = te[ 15 ], t11 = n23 * n34 * n42 - n24 * n33 * n42 + n24 * n32 * n43 - n22 * n34 * n43 - n23 * n32 * n44 + n22 * n33 * n44, t12 = n14 * n33 * n42 - n13 * n34 * n42 - n14 * n32 * n43 + n12 * n34 * n43 + n13 * n32 * n44 - n12 * n33 * n44, t13 = n13 * n24 * n42 - n14 * n23 * n42 + n14 * n22 * n43 - n12 * n24 * n43 - n13 * n22 * n44 + n12 * n23 * n44, t14 = n14 * n23 * n32 - n13 * n24 * n32 - n14 * n22 * n33 + n12 * n24 * n33 + n13 * n22 * n34 - n12 * n23 * n34; const det = n11 * t11 + n21 * t12 + n31 * t13 + n41 * t14; if ( det === 0 ) return this.set( 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ); const detInv = 1 / det; te[ 0 ] = t11 * detInv; te[ 1 ] = ( n24 * n33 * n41 - n23 * n34 * n41 - n24 * n31 * n43 + n21 * n34 * n43 + n23 * n31 * n44 - n21 * n33 * n44 ) * detInv; te[ 2 ] = ( n22 * n34 * n41 - n24 * n32 * n41 + n24 * n31 * n42 - n21 * n34 * n42 - n22 * n31 * n44 + n21 * n32 * n44 ) * detInv; te[ 3 ] = ( n23 * n32 * n41 - n22 * n33 * n41 - n23 * n31 * n42 + n21 * n33 * n42 + n22 * n31 * n43 - n21 * n32 * n43 ) * detInv; te[ 4 ] = t12 * detInv; te[ 5 ] = ( n13 * n34 * n41 - n14 * n33 * n41 + n14 * n31 * n43 - n11 * n34 * n43 - n13 * n31 * n44 + n11 * n33 * n44 ) * detInv; te[ 6 ] = ( n14 * n32 * n41 - n12 * n34 * n41 - n14 * n31 * n42 + n11 * n34 * n42 + n12 * n31 * n44 - n11 * n32 * n44 ) * detInv; te[ 7 ] = ( n12 * n33 * n41 - n13 * n32 * n41 + n13 * n31 * n42 - n11 * n33 * n42 - n12 * n31 * n43 + n11 * n32 * n43 ) * detInv; te[ 8 ] = t13 * detInv; te[ 9 ] = ( n14 * n23 * n41 - n13 * n24 * n41 - n14 * n21 * n43 + n11 * n24 * n43 + n13 * n21 * n44 - n11 * n23 * n44 ) * detInv; te[ 10 ] = ( n12 * n24 * n41 - n14 * n22 * n41 + n14 * n21 * n42 - n11 * n24 * n42 - n12 * n21 * n44 + n11 * n22 * n44 ) * detInv; te[ 11 ] = ( n13 * n22 * n41 - n12 * n23 * n41 - n13 * n21 * n42 + n11 * n23 * n42 + n12 * n21 * n43 - n11 * n22 * n43 ) * detInv; te[ 12 ] = t14 * detInv; te[ 13 ] = ( n13 * n24 * n31 - n14 * n23 * n31 + n14 * n21 * n33 - n11 * n24 * n33 - n13 * n21 * n34 + n11 * n23 * n34 ) * detInv; te[ 14 ] = ( n14 * n22 * n31 - n12 * n24 * n31 - n14 * n21 * n32 + n11 * n24 * n32 + n12 * n21 * n34 - n11 * n22 * n34 ) * detInv; te[ 15 ] = ( n12 * n23 * n31 - n13 * n22 * n31 + n13 * n21 * n32 - n11 * n23 * n32 - n12 * n21 * n33 + n11 * n22 * n33 ) * detInv; return this; } /** * Multiplies the columns of this matrix by the given vector. * * @param {Vector3} v - The scale vector. * @return {Matrix4} A reference to this matrix. */ scale( v ) { const te = this.elements; const x = v.x, y = v.y, z = v.z; te[ 0 ] *= x; te[ 4 ] *= y; te[ 8 ] *= z; te[ 1 ] *= x; te[ 5 ] *= y; te[ 9 ] *= z; te[ 2 ] *= x; te[ 6 ] *= y; te[ 10 ] *= z; te[ 3 ] *= x; te[ 7 ] *= y; te[ 11 ] *= z; return this; } /** * Gets the maximum scale value of the three axes. * * @return {number} The maximum scale. 
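*
* A usage sketch (illustrative; a `mesh` with a computed bounding sphere is an assumption):
* ```js
* const worldRadius = mesh.geometry.boundingSphere.radius * mesh.matrixWorld.getMaxScaleOnAxis();
* ```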
*/ getMaxScaleOnAxis() { const te = this.elements; const scaleXSq = te[ 0 ] * te[ 0 ] + te[ 1 ] * te[ 1 ] + te[ 2 ] * te[ 2 ]; const scaleYSq = te[ 4 ] * te[ 4 ] + te[ 5 ] * te[ 5 ] + te[ 6 ] * te[ 6 ]; const scaleZSq = te[ 8 ] * te[ 8 ] + te[ 9 ] * te[ 9 ] + te[ 10 ] * te[ 10 ]; return Math.sqrt( Math.max( scaleXSq, scaleYSq, scaleZSq ) ); } /** * Sets this matrix as a translation transform from the given vector. * * @param {number|Vector3} x - The amount to translate in the X axis or alternatively a translation vector. * @param {number} y - The amount to translate in the Y axis. * @param {number} z - The amount to translate in the z axis. * @return {Matrix4} A reference to this matrix. */ makeTranslation( x, y, z ) { if ( x.isVector3 ) { this.set( 1, 0, 0, x.x, 0, 1, 0, x.y, 0, 0, 1, x.z, 0, 0, 0, 1 ); } else { this.set( 1, 0, 0, x, 0, 1, 0, y, 0, 0, 1, z, 0, 0, 0, 1 ); } return this; } /** * Sets this matrix as a rotational transformation around the X axis by * the given angle. * * @param {number} theta - The rotation in radians. * @return {Matrix4} A reference to this matrix. */ makeRotationX( theta ) { const c = Math.cos( theta ), s = Math.sin( theta ); this.set( 1, 0, 0, 0, 0, c, - s, 0, 0, s, c, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a rotational transformation around the Y axis by * the given angle. * * @param {number} theta - The rotation in radians. * @return {Matrix4} A reference to this matrix. */ makeRotationY( theta ) { const c = Math.cos( theta ), s = Math.sin( theta ); this.set( c, 0, s, 0, 0, 1, 0, 0, - s, 0, c, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a rotational transformation around the Z axis by * the given angle. * * @param {number} theta - The rotation in radians. * @return {Matrix4} A reference to this matrix. */ makeRotationZ( theta ) { const c = Math.cos( theta ), s = Math.sin( theta ); this.set( c, - s, 0, 0, s, c, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a rotational transformation around the given axis by * the given angle. * * This is a somewhat controversial but mathematically sound alternative to * rotating via Quaternions. See the discussion [here]{@link https://www.gamedev.net/articles/programming/math-and-physics/do-we-really-need-quaternions-r1199}. * * @param {Vector3} axis - The normalized rotation axis. * @param {number} angle - The rotation in radians. * @return {Matrix4} A reference to this matrix. */ makeRotationAxis( axis, angle ) { // Based on http://www.gamedev.net/reference/articles/article1199.asp const c = Math.cos( angle ); const s = Math.sin( angle ); const t = 1 - c; const x = axis.x, y = axis.y, z = axis.z; const tx = t * x, ty = t * y; this.set( tx * x + c, tx * y - s * z, tx * z + s * y, 0, tx * y + s * z, ty * y + c, ty * z - s * x, 0, tx * z - s * y, ty * z + s * x, t * z * z + c, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a scale transformation. * * @param {number} x - The amount to scale in the X axis. * @param {number} y - The amount to scale in the Y axis. * @param {number} z - The amount to scale in the Z axis. * @return {Matrix4} A reference to this matrix. */ makeScale( x, y, z ) { this.set( x, 0, 0, 0, 0, y, 0, 0, 0, 0, z, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix as a shear transformation. * * @param {number} xy - The amount to shear X by Y. * @param {number} xz - The amount to shear X by Z. * @param {number} yx - The amount to shear Y by X. * @param {number} yz - The amount to shear Y by Z. 
* @param {number} zx - The amount to shear Z by X. * @param {number} zy - The amount to shear Z by Y. * @return {Matrix4} A reference to this matrix. */ makeShear( xy, xz, yx, yz, zx, zy ) { this.set( 1, yx, zx, 0, xy, 1, zy, 0, xz, yz, 1, 0, 0, 0, 0, 1 ); return this; } /** * Sets this matrix to the transformation composed of the given position, * rotation (Quaternion) and scale. * * @param {Vector3} position - The position vector. * @param {Quaternion} quaternion - The rotation as a Quaternion. * @param {Vector3} scale - The scale vector. * @return {Matrix4} A reference to this matrix. */ compose( position, quaternion, scale ) { const te = this.elements; const x = quaternion._x, y = quaternion._y, z = quaternion._z, w = quaternion._w; const x2 = x + x, y2 = y + y, z2 = z + z; const xx = x * x2, xy = x * y2, xz = x * z2; const yy = y * y2, yz = y * z2, zz = z * z2; const wx = w * x2, wy = w * y2, wz = w * z2; const sx = scale.x, sy = scale.y, sz = scale.z; te[ 0 ] = ( 1 - ( yy + zz ) ) * sx; te[ 1 ] = ( xy + wz ) * sx; te[ 2 ] = ( xz - wy ) * sx; te[ 3 ] = 0; te[ 4 ] = ( xy - wz ) * sy; te[ 5 ] = ( 1 - ( xx + zz ) ) * sy; te[ 6 ] = ( yz + wx ) * sy; te[ 7 ] = 0; te[ 8 ] = ( xz + wy ) * sz; te[ 9 ] = ( yz - wx ) * sz; te[ 10 ] = ( 1 - ( xx + yy ) ) * sz; te[ 11 ] = 0; te[ 12 ] = position.x; te[ 13 ] = position.y; te[ 14 ] = position.z; te[ 15 ] = 1; return this; } /** * Decomposes this matrix into its position, rotation and scale components * and provides the result in the given objects. * * Note: Not all matrices are decomposable in this way. For example, if an * object has a non-uniformly scaled parent, then the object's world matrix * may not be decomposable, and this method may not be appropriate. * * @param {Vector3} position - The position vector. * @param {Quaternion} quaternion - The rotation as a Quaternion. * @param {Vector3} scale - The scale vector. * @return {Matrix4} A reference to this matrix. */ decompose( position, quaternion, scale ) { const te = this.elements; let sx = _v1$5.set( te[ 0 ], te[ 1 ], te[ 2 ] ).length(); const sy = _v1$5.set( te[ 4 ], te[ 5 ], te[ 6 ] ).length(); const sz = _v1$5.set( te[ 8 ], te[ 9 ], te[ 10 ] ).length(); // if determine is negative, we need to invert one scale const det = this.determinant(); if ( det < 0 ) sx = - sx; position.x = te[ 12 ]; position.y = te[ 13 ]; position.z = te[ 14 ]; // scale the rotation part _m1$4.copy( this ); const invSX = 1 / sx; const invSY = 1 / sy; const invSZ = 1 / sz; _m1$4.elements[ 0 ] *= invSX; _m1$4.elements[ 1 ] *= invSX; _m1$4.elements[ 2 ] *= invSX; _m1$4.elements[ 4 ] *= invSY; _m1$4.elements[ 5 ] *= invSY; _m1$4.elements[ 6 ] *= invSY; _m1$4.elements[ 8 ] *= invSZ; _m1$4.elements[ 9 ] *= invSZ; _m1$4.elements[ 10 ] *= invSZ; quaternion.setFromRotationMatrix( _m1$4 ); scale.x = sx; scale.y = sy; scale.z = sz; return this; } /** * Creates a perspective projection matrix. This is used internally by * {@link PerspectiveCamera#updateProjectionMatrix}. * @param {number} left - Left boundary of the viewing frustum at the near plane. * @param {number} right - Right boundary of the viewing frustum at the near plane. * @param {number} top - Top boundary of the viewing frustum at the near plane. * @param {number} bottom - Bottom boundary of the viewing frustum at the near plane. * @param {number} near - The distance from the camera to the near plane. * @param {number} far - The distance from the camera to the far plane. 
* @param {(WebGLCoordinateSystem|WebGPUCoordinateSystem)} [coordinateSystem=WebGLCoordinateSystem] - The coordinate system. * @return {Matrix4} A reference to this matrix. */ makePerspective( left, right, top, bottom, near, far, coordinateSystem = WebGLCoordinateSystem ) { const te = this.elements; const x = 2 * near / ( right - left ); const y = 2 * near / ( top - bottom ); const a = ( right + left ) / ( right - left ); const b = ( top + bottom ) / ( top - bottom ); let c, d; if ( coordinateSystem === WebGLCoordinateSystem ) { c = - ( far + near ) / ( far - near ); d = ( -2 * far * near ) / ( far - near ); } else if ( coordinateSystem === WebGPUCoordinateSystem ) { c = - far / ( far - near ); d = ( - far * near ) / ( far - near ); } else { throw new Error( 'THREE.Matrix4.makePerspective(): Invalid coordinate system: ' + coordinateSystem ); } te[ 0 ] = x; te[ 4 ] = 0; te[ 8 ] = a; te[ 12 ] = 0; te[ 1 ] = 0; te[ 5 ] = y; te[ 9 ] = b; te[ 13 ] = 0; te[ 2 ] = 0; te[ 6 ] = 0; te[ 10 ] = c; te[ 14 ] = d; te[ 3 ] = 0; te[ 7 ] = 0; te[ 11 ] = -1; te[ 15 ] = 0; return this; } /** * Creates a orthographic projection matrix. This is used internally by * {@link OrthographicCamera#updateProjectionMatrix}. * @param {number} left - Left boundary of the viewing frustum at the near plane. * @param {number} right - Right boundary of the viewing frustum at the near plane. * @param {number} top - Top boundary of the viewing frustum at the near plane. * @param {number} bottom - Bottom boundary of the viewing frustum at the near plane. * @param {number} near - The distance from the camera to the near plane. * @param {number} far - The distance from the camera to the far plane. * @param {(WebGLCoordinateSystem|WebGPUCoordinateSystem)} [coordinateSystem=WebGLCoordinateSystem] - The coordinate system. * @return {Matrix4} A reference to this matrix. */ makeOrthographic( left, right, top, bottom, near, far, coordinateSystem = WebGLCoordinateSystem ) { const te = this.elements; const w = 1.0 / ( right - left ); const h = 1.0 / ( top - bottom ); const p = 1.0 / ( far - near ); const x = ( right + left ) * w; const y = ( top + bottom ) * h; let z, zInv; if ( coordinateSystem === WebGLCoordinateSystem ) { z = ( far + near ) * p; zInv = -2 * p; } else if ( coordinateSystem === WebGPUCoordinateSystem ) { z = near * p; zInv = -1 * p; } else { throw new Error( 'THREE.Matrix4.makeOrthographic(): Invalid coordinate system: ' + coordinateSystem ); } te[ 0 ] = 2 * w; te[ 4 ] = 0; te[ 8 ] = 0; te[ 12 ] = - x; te[ 1 ] = 0; te[ 5 ] = 2 * h; te[ 9 ] = 0; te[ 13 ] = - y; te[ 2 ] = 0; te[ 6 ] = 0; te[ 10 ] = zInv; te[ 14 ] = - z; te[ 3 ] = 0; te[ 7 ] = 0; te[ 11 ] = 0; te[ 15 ] = 1; return this; } /** * Returns `true` if this matrix is equal with the given one. * * @param {Matrix4} matrix - The matrix to test for equality. * @return {boolean} Whether this matrix is equal with the given one. */ equals( matrix ) { const te = this.elements; const me = matrix.elements; for ( let i = 0; i < 16; i ++ ) { if ( te[ i ] !== me[ i ] ) return false; } return true; } /** * Sets the elements of the matrix from the given array. * * @param {Array} array - The matrix elements in column-major order. * @param {number} [offset=0] - Index of the first element in the array. * @return {Matrix4} A reference to this matrix. */ fromArray( array, offset = 0 ) { for ( let i = 0; i < 16; i ++ ) { this.elements[ i ] = array[ i + offset ]; } return this; } /** * Writes the elements of this matrix to the given array. 
If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the matrix elements in column-major order. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The matrix elements in column-major order. */ toArray( array = [], offset = 0 ) { const te = this.elements; array[ offset ] = te[ 0 ]; array[ offset + 1 ] = te[ 1 ]; array[ offset + 2 ] = te[ 2 ]; array[ offset + 3 ] = te[ 3 ]; array[ offset + 4 ] = te[ 4 ]; array[ offset + 5 ] = te[ 5 ]; array[ offset + 6 ] = te[ 6 ]; array[ offset + 7 ] = te[ 7 ]; array[ offset + 8 ] = te[ 8 ]; array[ offset + 9 ] = te[ 9 ]; array[ offset + 10 ] = te[ 10 ]; array[ offset + 11 ] = te[ 11 ]; array[ offset + 12 ] = te[ 12 ]; array[ offset + 13 ] = te[ 13 ]; array[ offset + 14 ] = te[ 14 ]; array[ offset + 15 ] = te[ 15 ]; return array; } } const _v1$5 = /*@__PURE__*/ new Vector3(); const _m1$4 = /*@__PURE__*/ new Matrix4(); const _zero = /*@__PURE__*/ new Vector3( 0, 0, 0 ); const _one = /*@__PURE__*/ new Vector3( 1, 1, 1 ); const _x = /*@__PURE__*/ new Vector3(); const _y = /*@__PURE__*/ new Vector3(); const _z = /*@__PURE__*/ new Vector3(); const _matrix$2 = /*@__PURE__*/ new Matrix4(); const _quaternion$3 = /*@__PURE__*/ new Quaternion(); /** * A class representing Euler angles. * * Euler angles describe a rotational transformation by rotating an object on * its various axes in specified amounts per axis, and a specified axis * order. * * Iterating through an instance will yield its components (x, y, z, * order) in the corresponding order. * * ```js * const a = new THREE.Euler( 0, 1, 1.57, 'XYZ' ); * const b = new THREE.Vector3( 1, 0, 1 ); * b.applyEuler(a); * ``` */ class Euler { /** * Constructs a new euler instance. * * @param {number} [x=0] - The angle of the x axis in radians. * @param {number} [y=0] - The angle of the y axis in radians. * @param {number} [z=0] - The angle of the z axis in radians. * @param {string} [order=Euler.DEFAULT_ORDER] - A string representing the order that the rotations are applied. */ constructor( x = 0, y = 0, z = 0, order = Euler.DEFAULT_ORDER ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isEuler = true; this._x = x; this._y = y; this._z = z; this._order = order; } /** * The angle of the x axis in radians. * * @type {number} * @default 0 */ get x() { return this._x; } set x( value ) { this._x = value; this._onChangeCallback(); } /** * The angle of the y axis in radians. * * @type {number} * @default 0 */ get y() { return this._y; } set y( value ) { this._y = value; this._onChangeCallback(); } /** * The angle of the z axis in radians. * * @type {number} * @default 0 */ get z() { return this._z; } set z( value ) { this._z = value; this._onChangeCallback(); } /** * A string representing the order that the rotations are applied. * * @type {string} * @default 'XYZ' */ get order() { return this._order; } set order( value ) { this._order = value; this._onChangeCallback(); } /** * Sets the Euler components. * * @param {number} x - The angle of the x axis in radians. * @param {number} y - The angle of the y axis in radians. * @param {number} z - The angle of the z axis in radians. * @param {string} [order] - A string representing the order that the rotations are applied. * @return {Euler} A reference to this Euler instance. 
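*
* A minimal sketch (illustrative):
* ```js
* const euler = new THREE.Euler();
* euler.set( Math.PI / 2, 0, 0, 'YXZ' ); // rotate 90 degrees about X, applied in 'YXZ' order
* ```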
*/ set( x, y, z, order = this._order ) { this._x = x; this._y = y; this._z = z; this._order = order; this._onChangeCallback(); return this; } /** * Returns a new Euler instance with copied values from this instance. * * @return {Euler} A clone of this instance. */ clone() { return new this.constructor( this._x, this._y, this._z, this._order ); } /** * Copies the values of the given Euler instance to this instance. * * @param {Euler} euler - The Euler instance to copy. * @return {Euler} A reference to this Euler instance. */ copy( euler ) { this._x = euler._x; this._y = euler._y; this._z = euler._z; this._order = euler._order; this._onChangeCallback(); return this; } /** * Sets the angles of this Euler instance from a pure rotation matrix. * * @param {Matrix4} m - A 4x4 matrix of which the upper 3x3 of matrix is a pure rotation matrix (i.e. unscaled). * @param {string} [order] - A string representing the order that the rotations are applied. * @param {boolean} [update=true] - Whether the internal `onChange` callback should be executed or not. * @return {Euler} A reference to this Euler instance. */ setFromRotationMatrix( m, order = this._order, update = true ) { const te = m.elements; const m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ]; const m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ]; const m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ]; switch ( order ) { case 'XYZ': this._y = Math.asin( clamp( m13, -1, 1 ) ); if ( Math.abs( m13 ) < 0.9999999 ) { this._x = Math.atan2( - m23, m33 ); this._z = Math.atan2( - m12, m11 ); } else { this._x = Math.atan2( m32, m22 ); this._z = 0; } break; case 'YXZ': this._x = Math.asin( - clamp( m23, -1, 1 ) ); if ( Math.abs( m23 ) < 0.9999999 ) { this._y = Math.atan2( m13, m33 ); this._z = Math.atan2( m21, m22 ); } else { this._y = Math.atan2( - m31, m11 ); this._z = 0; } break; case 'ZXY': this._x = Math.asin( clamp( m32, -1, 1 ) ); if ( Math.abs( m32 ) < 0.9999999 ) { this._y = Math.atan2( - m31, m33 ); this._z = Math.atan2( - m12, m22 ); } else { this._y = 0; this._z = Math.atan2( m21, m11 ); } break; case 'ZYX': this._y = Math.asin( - clamp( m31, -1, 1 ) ); if ( Math.abs( m31 ) < 0.9999999 ) { this._x = Math.atan2( m32, m33 ); this._z = Math.atan2( m21, m11 ); } else { this._x = 0; this._z = Math.atan2( - m12, m22 ); } break; case 'YZX': this._z = Math.asin( clamp( m21, -1, 1 ) ); if ( Math.abs( m21 ) < 0.9999999 ) { this._x = Math.atan2( - m23, m22 ); this._y = Math.atan2( - m31, m11 ); } else { this._x = 0; this._y = Math.atan2( m13, m33 ); } break; case 'XZY': this._z = Math.asin( - clamp( m12, -1, 1 ) ); if ( Math.abs( m12 ) < 0.9999999 ) { this._x = Math.atan2( m32, m22 ); this._y = Math.atan2( m13, m11 ); } else { this._x = Math.atan2( - m23, m33 ); this._y = 0; } break; default: console.warn( 'THREE.Euler: .setFromRotationMatrix() encountered an unknown order: ' + order ); } this._order = order; if ( update === true ) this._onChangeCallback(); return this; } /** * Sets the angles of this Euler instance from a normalized quaternion. * * @param {Quaternion} q - A normalized Quaternion. * @param {string} [order] - A string representing the order that the rotations are applied. * @param {boolean} [update=true] - Whether the internal `onChange` callback should be executed or not. * @return {Euler} A reference to this Euler instance. */ setFromQuaternion( q, order, update ) { _matrix$2.makeRotationFromQuaternion( q ); return this.setFromRotationMatrix( _matrix$2, order, update ); } /** * Sets the angles of this Euler instance from the given vector. 
* * @param {Vector3} v - The vector. * @param {string} [order] - A string representing the order that the rotations are applied. * @return {Euler} A reference to this Euler instance. */ setFromVector3( v, order = this._order ) { return this.set( v.x, v.y, v.z, order ); } /** * Resets the euler angle with a new order by creating a quaternion from this * euler angle and then setting this euler angle with the quaternion and the * new order. * * Warning: This discards revolution information. * * @param {string} [newOrder] - A string representing the new order that the rotations are applied. * @return {Euler} A reference to this Euler instance. */ reorder( newOrder ) { _quaternion$3.setFromEuler( this ); return this.setFromQuaternion( _quaternion$3, newOrder ); } /** * Returns `true` if this Euler instance is equal to the given one. * * @param {Euler} euler - The Euler instance to test for equality. * @return {boolean} Whether this Euler instance is equal to the given one. */ equals( euler ) { return ( euler._x === this._x ) && ( euler._y === this._y ) && ( euler._z === this._z ) && ( euler._order === this._order ); } /** * Sets this Euler instance's components to values from the given array. The first three * entries of the array are assigned to the x, y and z components. An optional fourth entry * defines the Euler order. * * @param {Array} array - An array holding the Euler component values. * @return {Euler} A reference to this Euler instance. */ fromArray( array ) { this._x = array[ 0 ]; this._y = array[ 1 ]; this._z = array[ 2 ]; if ( array[ 3 ] !== undefined ) this._order = array[ 3 ]; this._onChangeCallback(); return this; } /** * Writes the components of this Euler instance to the given array. If no array is provided, * the method returns a new instance. * * @param {Array} [array=[]] - The target array holding the Euler components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The Euler components. */ toArray( array = [], offset = 0 ) { array[ offset ] = this._x; array[ offset + 1 ] = this._y; array[ offset + 2 ] = this._z; array[ offset + 3 ] = this._order; return array; } _onChange( callback ) { this._onChangeCallback = callback; return this; } _onChangeCallback() {} *[ Symbol.iterator ]() { yield this._x; yield this._y; yield this._z; yield this._order; } } /** * The default Euler angle order. * * @static * @type {string} * @default 'XYZ' */ Euler.DEFAULT_ORDER = 'XYZ'; /** * A layers object assigns a 3D object to one or more of 32 * layers numbered `0` to `31` - internally the layers are stored as a * bit mask, and by default all 3D objects are a member of layer `0`. * * This can be used to control visibility - an object must share a layer with * a camera to be visible when that camera's view is * rendered. * * All classes that inherit from {@link Object3D} have a `layers` property which * is an instance of this class. */ class Layers { /** * Constructs a new layers instance, with membership * initially set to layer `0`. */ constructor() { /** * A bit mask storing which of the 32 layers this layers object is currently * a member of. * * @type {number} */ this.mask = 1 | 0; } /** * Sets membership to the given layer, and removes membership of all other layers. * * @param {number} layer - The layer to set. */ set( layer ) { this.mask = ( 1 << layer | 0 ) >>> 0; } /** * Adds membership of the given layer. * * @param {number} layer - The layer to enable. */ enable( layer ) { this.mask |= 1 << layer | 0; } /** * Adds membership to all layers.
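*
* A general usage sketch for layer membership (illustrative; `mesh` and `camera` are
* assumptions - the mesh is only rendered if it shares at least one layer with the camera):
* ```js
* mesh.layers.set( 1 ); // member of layer 1 only
* camera.layers.enable( 1 ); // the camera now renders layers 0 and 1
* ```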
*/ enableAll() { this.mask = 0xffffffff | 0; } /** * Toggles the membership of the given layer. * * @param {number} layer - The layer to toggle. */ toggle( layer ) { this.mask ^= 1 << layer | 0; } /** * Removes membership of the given layer. * * @param {number} layer - The layer to disable. */ disable( layer ) { this.mask &= ~ ( 1 << layer | 0 ); } /** * Removes the membership from all layers. */ disableAll() { this.mask = 0; } /** * Returns `true` if this and the given layers object have at least one * layer in common. * * @param {Layers} layers - The layers to test. * @return {boolean} Whether this and the given layers object have at least one layer in common or not. */ test( layers ) { return ( this.mask & layers.mask ) !== 0; } /** * Returns `true` if the given layer is enabled. * * @param {number} layer - The layer to test. * @return {boolean} Whether the given layer is enabled or not. */ isEnabled( layer ) { return ( this.mask & ( 1 << layer | 0 ) ) !== 0; } } let _object3DId = 0; const _v1$4 = /*@__PURE__*/ new Vector3(); const _q1 = /*@__PURE__*/ new Quaternion(); const _m1$3 = /*@__PURE__*/ new Matrix4(); const _target = /*@__PURE__*/ new Vector3(); const _position$3 = /*@__PURE__*/ new Vector3(); const _scale$2 = /*@__PURE__*/ new Vector3(); const _quaternion$2 = /*@__PURE__*/ new Quaternion(); const _xAxis = /*@__PURE__*/ new Vector3( 1, 0, 0 ); const _yAxis = /*@__PURE__*/ new Vector3( 0, 1, 0 ); const _zAxis = /*@__PURE__*/ new Vector3( 0, 0, 1 ); /** * Fires when the object has been added to its parent object. * * @event Object3D#added * @type {Object} */ const _addedEvent = { type: 'added' }; /** * Fires when the object has been removed from its parent object. * * @event Object3D#removed * @type {Object} */ const _removedEvent = { type: 'removed' }; /** * Fires when a new child object has been added. * * @event Object3D#childadded * @type {Object} */ const _childaddedEvent = { type: 'childadded', child: null }; /** * Fires when a child object has been removed. * * @event Object3D#childremoved * @type {Object} */ const _childremovedEvent = { type: 'childremoved', child: null }; /** * This is the base class for most objects in three.js and provides a set of * properties and methods for manipulating objects in 3D space. * * @augments EventDispatcher */ class Object3D extends EventDispatcher { /** * Constructs a new 3D object. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isObject3D = true; /** * The ID of the 3D object. * * @name Object3D#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _object3DId ++ } ); /** * The UUID of the 3D object. * * @type {string} * @readonly */ this.uuid = generateUUID(); /** * The name of the 3D object. * * @type {string} */ this.name = ''; /** * The type property is used for detecting the object type * in context of serialization/deserialization. * * @type {string} * @readonly */ this.type = 'Object3D'; /** * A reference to the parent object. * * @type {?Object3D} * @default null */ this.parent = null; /** * An array holding the child 3D objects of this instance. * * @type {Array} */ this.children = []; /** * Defines the `up` direction of the 3D object which influences * the orientation via methods like {@link Object3D#lookAt}. * * The default value for all 3D objects is defined by `Object3D.DEFAULT_UP`.
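*
* A usage sketch (illustrative; switching a single object to a Z-up convention before `lookAt`):
* ```js
* const object = new THREE.Object3D();
* object.up.set( 0, 0, 1 );
* object.lookAt( 1, 2, 3 );
* ```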
* * @type {Vector3} */ this.up = Object3D.DEFAULT_UP.clone(); const position = new Vector3(); const rotation = new Euler(); const quaternion = new Quaternion(); const scale = new Vector3( 1, 1, 1 ); function onRotationChange() { quaternion.setFromEuler( rotation, false ); } function onQuaternionChange() { rotation.setFromQuaternion( quaternion, undefined, false ); } rotation._onChange( onRotationChange ); quaternion._onChange( onQuaternionChange ); Object.defineProperties( this, { /** * Represents the object's local position. * * @name Object3D#position * @type {Vector3} * @default (0,0,0) */ position: { configurable: true, enumerable: true, value: position }, /** * Represents the object's local rotation as Euler angles, in radians. * * @name Object3D#rotation * @type {Euler} * @default (0,0,0) */ rotation: { configurable: true, enumerable: true, value: rotation }, /** * Represents the object's local rotation as Quaternions. * * @name Object3D#quaternion * @type {Quaternion} */ quaternion: { configurable: true, enumerable: true, value: quaternion }, /** * Represents the object's local scale. * * @name Object3D#scale * @type {Vector3} * @default (1,1,1) */ scale: { configurable: true, enumerable: true, value: scale }, /** * Represents the object's model-view matrix. * * @name Object3D#modelViewMatrix * @type {Matrix4} */ modelViewMatrix: { value: new Matrix4() }, /** * Represents the object's normal matrix. * * @name Object3D#normalMatrix * @type {Matrix3} */ normalMatrix: { value: new Matrix3() } } ); /** * Represents the object's transformation matrix in local space. * * @type {Matrix4} */ this.matrix = new Matrix4(); /** * Represents the object's transformation matrix in world space. * If the 3D object has no parent, then it's identical to the local transformation matrix * * @type {Matrix4} */ this.matrixWorld = new Matrix4(); /** * When set to `true`, the engine automatically computes the local matrix from position, * rotation and scale every frame. * * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_AUTO_UPDATE`. * * @type {boolean} * @default true */ this.matrixAutoUpdate = Object3D.DEFAULT_MATRIX_AUTO_UPDATE; /** * When set to `true`, the engine automatically computes the world matrix from the current local * matrix and the object's transformation hierarchy. * * The default values for all 3D objects is defined by `Object3D.DEFAULT_MATRIX_WORLD_AUTO_UPDATE`. * * @type {boolean} * @default true */ this.matrixWorldAutoUpdate = Object3D.DEFAULT_MATRIX_WORLD_AUTO_UPDATE; // checked by the renderer /** * When set to `true`, it calculates the world matrix in that frame and resets this property * to `false`. * * @type {boolean} * @default false */ this.matrixWorldNeedsUpdate = false; /** * The layer membership of the 3D object. The 3D object is only visible if it has * at least one layer in common with the camera in use. This property can also be * used to filter out unwanted objects in ray-intersection tests when using {@link Raycaster}. * * @type {Layers} */ this.layers = new Layers(); /** * When set to `true`, the 3D object gets rendered. * * @type {boolean} * @default true */ this.visible = true; /** * When set to `true`, the 3D object gets rendered into shadow maps. * * @type {boolean} * @default false */ this.castShadow = false; /** * When set to `true`, the 3D object is affected by shadows in the scene. * * @type {boolean} * @default false */ this.receiveShadow = false; /** * When set to `true`, the 3D object is honored by view frustum culling. 
* * @type {boolean} * @default true */ this.frustumCulled = true; /** * This value allows the default rendering order of scene graph objects to be * overridden although opaque and transparent objects remain sorted independently. * When this property is set for an instance of {@link Group},all descendants * objects will be sorted and rendered together. Sorting is from lowest to highest * render order. * * @type {number} * @default 0 */ this.renderOrder = 0; /** * An array holding the animation clips of the 3D object. * * @type {Array} */ this.animations = []; /** * Custom depth material to be used when rendering to the depth map. Can only be used * in context of meshes. When shadow-casting with a {@link DirectionalLight} or {@link SpotLight}, * if you are modifying vertex positions in the vertex shader you must specify a custom depth * material for proper shadows. * * Only relevant in context of {@link WebGLRenderer}. * * @type {(Material|undefined)} * @default undefined */ this.customDepthMaterial = undefined; /** * Same as {@link Object3D#customDepthMaterial}, but used with {@link PointLight}. * * Only relevant in context of {@link WebGLRenderer}. * * @type {(Material|undefined)} * @default undefined */ this.customDistanceMaterial = undefined; /** * An object that can be used to store custom data about the 3D object. It * should not hold references to functions as these will not be cloned. * * @type {Object} */ this.userData = {}; } /** * A callback that is executed immediately before a 3D object is rendered to a shadow map. * * @param {Renderer|WebGLRenderer} renderer - The renderer. * @param {Object3D} object - The 3D object. * @param {Camera} camera - The camera that is used to render the scene. * @param {Camera} shadowCamera - The shadow camera. * @param {BufferGeometry} geometry - The 3D object's geometry. * @param {Material} depthMaterial - The depth material. * @param {Object} group - The geometry group data. */ onBeforeShadow( /* renderer, object, camera, shadowCamera, geometry, depthMaterial, group */ ) {} /** * A callback that is executed immediately after a 3D object is rendered to a shadow map. * * @param {Renderer|WebGLRenderer} renderer - The renderer. * @param {Object3D} object - The 3D object. * @param {Camera} camera - The camera that is used to render the scene. * @param {Camera} shadowCamera - The shadow camera. * @param {BufferGeometry} geometry - The 3D object's geometry. * @param {Material} depthMaterial - The depth material. * @param {Object} group - The geometry group data. */ onAfterShadow( /* renderer, object, camera, shadowCamera, geometry, depthMaterial, group */ ) {} /** * A callback that is executed immediately before a 3D object is rendered. * * @param {Renderer|WebGLRenderer} renderer - The renderer. * @param {Object3D} object - The 3D object. * @param {Camera} camera - The camera that is used to render the scene. * @param {BufferGeometry} geometry - The 3D object's geometry. * @param {Material} material - The 3D object's material. * @param {Object} group - The geometry group data. */ onBeforeRender( /* renderer, scene, camera, geometry, material, group */ ) {} /** * A callback that is executed immediately after a 3D object is rendered. * * @param {Renderer|WebGLRenderer} renderer - The renderer. * @param {Object3D} object - The 3D object. * @param {Camera} camera - The camera that is used to render the scene. * @param {BufferGeometry} geometry - The 3D object's geometry. * @param {Material} material - The 3D object's material. 
* @param {Object} group - The geometry group data. */ onAfterRender( /* renderer, scene, camera, geometry, material, group */ ) {} /** * Applies the given transformation matrix to the object and updates the object's position, * rotation and scale. * * @param {Matrix4} matrix - The transformation matrix. */ applyMatrix4( matrix ) { if ( this.matrixAutoUpdate ) this.updateMatrix(); this.matrix.premultiply( matrix ); this.matrix.decompose( this.position, this.quaternion, this.scale ); } /** * Applies a rotation represented by given the quaternion to the 3D object. * * @param {Quaternion} q - The quaternion. * @return {Object3D} A reference to this instance. */ applyQuaternion( q ) { this.quaternion.premultiply( q ); return this; } /** * Sets the given rotation represented as an axis/angle couple to the 3D object. * * @param {Vector3} axis - The (normalized) axis vector. * @param {number} angle - The angle in radians. */ setRotationFromAxisAngle( axis, angle ) { // assumes axis is normalized this.quaternion.setFromAxisAngle( axis, angle ); } /** * Sets the given rotation represented as Euler angles to the 3D object. * * @param {Euler} euler - The Euler angles. */ setRotationFromEuler( euler ) { this.quaternion.setFromEuler( euler, true ); } /** * Sets the given rotation represented as rotation matrix to the 3D object. * * @param {Matrix4} m - Although a 4x4 matrix is expected, the upper 3x3 portion must be * a pure rotation matrix (i.e, unscaled). */ setRotationFromMatrix( m ) { // assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled) this.quaternion.setFromRotationMatrix( m ); } /** * Sets the given rotation represented as a Quaternion to the 3D object. * * @param {Quaternion} q - The Quaternion */ setRotationFromQuaternion( q ) { // assumes q is normalized this.quaternion.copy( q ); } /** * Rotates the 3D object along an axis in local space. * * @param {Vector3} axis - The (normalized) axis vector. * @param {number} angle - The angle in radians. * @return {Object3D} A reference to this instance. */ rotateOnAxis( axis, angle ) { // rotate object on axis in object space // axis is assumed to be normalized _q1.setFromAxisAngle( axis, angle ); this.quaternion.multiply( _q1 ); return this; } /** * Rotates the 3D object along an axis in world space. * * @param {Vector3} axis - The (normalized) axis vector. * @param {number} angle - The angle in radians. * @return {Object3D} A reference to this instance. */ rotateOnWorldAxis( axis, angle ) { // rotate object on axis in world space // axis is assumed to be normalized // method assumes no rotated parent _q1.setFromAxisAngle( axis, angle ); this.quaternion.premultiply( _q1 ); return this; } /** * Rotates the 3D object around its X axis in local space. * * @param {number} angle - The angle in radians. * @return {Object3D} A reference to this instance. */ rotateX( angle ) { return this.rotateOnAxis( _xAxis, angle ); } /** * Rotates the 3D object around its Y axis in local space. * * @param {number} angle - The angle in radians. * @return {Object3D} A reference to this instance. */ rotateY( angle ) { return this.rotateOnAxis( _yAxis, angle ); } /** * Rotates the 3D object around its Z axis in local space. * * @param {number} angle - The angle in radians. * @return {Object3D} A reference to this instance. */ rotateZ( angle ) { return this.rotateOnAxis( _zAxis, angle ); } /** * Translate the 3D object by a distance along the given axis in local space. * * @param {Vector3} axis - The (normalized) axis vector. 
* @param {number} distance - The distance in world units. * @return {Object3D} A reference to this instance. */ translateOnAxis( axis, distance ) { // translate object by distance along axis in object space // axis is assumed to be normalized _v1$4.copy( axis ).applyQuaternion( this.quaternion ); this.position.add( _v1$4.multiplyScalar( distance ) ); return this; } /** * Translate the 3D object by a distance along its X-axis in local space. * * @param {number} distance - The distance in world units. * @return {Object3D} A reference to this instance. */ translateX( distance ) { return this.translateOnAxis( _xAxis, distance ); } /** * Translate the 3D object by a distance along its Y-axis in local space. * * @param {number} distance - The distance in world units. * @return {Object3D} A reference to this instance. */ translateY( distance ) { return this.translateOnAxis( _yAxis, distance ); } /** * Translate the 3D object by a distance along its Z-axis in local space. * * @param {number} distance - The distance in world units. * @return {Object3D} A reference to this instance. */ translateZ( distance ) { return this.translateOnAxis( _zAxis, distance ); } /** * Converts the given vector from this 3D object's local space to world space. * * @param {Vector3} vector - The vector to convert. * @return {Vector3} The converted vector. */ localToWorld( vector ) { this.updateWorldMatrix( true, false ); return vector.applyMatrix4( this.matrixWorld ); } /** * Converts the given vector from this 3D object's world space to local space. * * @param {Vector3} vector - The vector to convert. * @return {Vector3} The converted vector. */ worldToLocal( vector ) { this.updateWorldMatrix( true, false ); return vector.applyMatrix4( _m1$3.copy( this.matrixWorld ).invert() ); } /** * Rotates the object to face a point in world space. * * This method does not support objects having non-uniformly-scaled parent(s). * * @param {number|Vector3} x - The x coordinate in world space. Alternatively, a vector representing a position in world space. * @param {number} [y] - The y coordinate in world space. * @param {number} [z] - The z coordinate in world space. */ lookAt( x, y, z ) { // This method does not support objects having non-uniformly-scaled parent(s) if ( x.isVector3 ) { _target.copy( x ); } else { _target.set( x, y, z ); } const parent = this.parent; this.updateWorldMatrix( true, false ); _position$3.setFromMatrixPosition( this.matrixWorld ); if ( this.isCamera || this.isLight ) { _m1$3.lookAt( _position$3, _target, this.up ); } else { _m1$3.lookAt( _target, _position$3, this.up ); } this.quaternion.setFromRotationMatrix( _m1$3 ); if ( parent ) { _m1$3.extractRotation( parent.matrixWorld ); _q1.setFromRotationMatrix( _m1$3 ); this.quaternion.premultiply( _q1.invert() ); } } /** * Adds the given 3D object as a child to this 3D object. An arbitrary number of * objects may be added. Any current parent on an object passed in here will be * removed, since an object can have at most one parent. * * @fires Object3D#added * @fires Object3D#childadded * @param {Object3D} object - The 3D object to add. * @return {Object3D} A reference to this instance.
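*
* A usage sketch (illustrative; the `scene` instance is an assumption):
* ```js
* const group = new THREE.Group();
* group.add( new THREE.Object3D(), new THREE.Object3D() ); // multiple children at once
* scene.add( group );
* ```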
*/ add( object ) { if ( arguments.length > 1 ) { for ( let i = 0; i < arguments.length; i ++ ) { this.add( arguments[ i ] ); } return this; } if ( object === this ) { console.error( 'THREE.Object3D.add: object can\'t be added as a child of itself.', object ); return this; } if ( object && object.isObject3D ) { object.removeFromParent(); object.parent = this; this.children.push( object ); object.dispatchEvent( _addedEvent ); _childaddedEvent.child = object; this.dispatchEvent( _childaddedEvent ); _childaddedEvent.child = null; } else { console.error( 'THREE.Object3D.add: object not an instance of THREE.Object3D.', object ); } return this; } /** * Removes the given 3D object as child from this 3D object. * An arbitrary number of objects may be removed. * * @fires Object3D#removed * @fires Object3D#childremoved * @param {Object3D} object - The 3D object to remove. * @return {Object3D} A reference to this instance. */ remove( object ) { if ( arguments.length > 1 ) { for ( let i = 0; i < arguments.length; i ++ ) { this.remove( arguments[ i ] ); } return this; } const index = this.children.indexOf( object ); if ( index !== -1 ) { object.parent = null; this.children.splice( index, 1 ); object.dispatchEvent( _removedEvent ); _childremovedEvent.child = object; this.dispatchEvent( _childremovedEvent ); _childremovedEvent.child = null; } return this; } /** * Removes this 3D object from its current parent. * * @fires Object3D#removed * @fires Object3D#childremoved * @return {Object3D} A reference to this instance. */ removeFromParent() { const parent = this.parent; if ( parent !== null ) { parent.remove( this ); } return this; } /** * Removes all child objects. * * @fires Object3D#removed * @fires Object3D#childremoved * @return {Object3D} A reference to this instance. */ clear() { return this.remove( ... this.children ); } /** * Adds the given 3D object as a child of this 3D object, while maintaining the object's world * transform. This method does not support scene graphs having non-uniformly-scaled nodes(s). * * @fires Object3D#added * @fires Object3D#childadded * @param {Object3D} object - The 3D object to attach. * @return {Object3D} A reference to this instance. */ attach( object ) { // adds object as a child of this, while maintaining the object's world transform // Note: This method does not support scene graphs having non-uniformly-scaled nodes(s) this.updateWorldMatrix( true, false ); _m1$3.copy( this.matrixWorld ).invert(); if ( object.parent !== null ) { object.parent.updateWorldMatrix( true, false ); _m1$3.multiply( object.parent.matrixWorld ); } object.applyMatrix4( _m1$3 ); object.removeFromParent(); object.parent = this; this.children.push( object ); object.updateWorldMatrix( false, true ); object.dispatchEvent( _addedEvent ); _childaddedEvent.child = object; this.dispatchEvent( _childaddedEvent ); _childaddedEvent.child = null; return this; } /** * Searches through the 3D object and its children, starting with the 3D object * itself, and returns the first with a matching ID. * * @param {number} id - The id. * @return {Object3D|undefined} The found 3D object. Returns `undefined` if no 3D object has been found. */ getObjectById( id ) { return this.getObjectByProperty( 'id', id ); } /** * Searches through the 3D object and its children, starting with the 3D object * itself, and returns the first with a matching name. * * @param {string} name - The name. * @return {Object3D|undefined} The found 3D object. Returns `undefined` if no 3D object has been found. 
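*
* A usage sketch (illustrative; `scene` and the name 'Head' are assumptions):
* ```js
* const head = scene.getObjectByName( 'Head' );
* if ( head !== undefined ) head.visible = false;
* ```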
*/ getObjectByName( name ) { return this.getObjectByProperty( 'name', name ); } /** * Searches through the 3D object and its children, starting with the 3D object * itself, and returns the first with a matching property value. * * @param {string} name - The name of the property. * @param {any} value - The value. * @return {Object3D|undefined} The found 3D object. Returns `undefined` if no 3D object has been found. */ getObjectByProperty( name, value ) { if ( this[ name ] === value ) return this; for ( let i = 0, l = this.children.length; i < l; i ++ ) { const child = this.children[ i ]; const object = child.getObjectByProperty( name, value ); if ( object !== undefined ) { return object; } } return undefined; } /** * Searches through the 3D object and its children, starting with the 3D object * itself, and returns all 3D objects with a matching property value. * * @param {string} name - The name of the property. * @param {any} value - The value. * @param {Array} result - The method stores the result in this array. * @return {Array} The found 3D objects. */ getObjectsByProperty( name, value, result = [] ) { if ( this[ name ] === value ) result.push( this ); const children = this.children; for ( let i = 0, l = children.length; i < l; i ++ ) { children[ i ].getObjectsByProperty( name, value, result ); } return result; } /** * Returns a vector representing the position of the 3D object in world space. * * @param {Vector3} target - The target vector the result is stored to. * @return {Vector3} The 3D object's position in world space. */ getWorldPosition( target ) { this.updateWorldMatrix( true, false ); return target.setFromMatrixPosition( this.matrixWorld ); } /** * Returns a Quaternion representing the rotation of the 3D object in world space. * * @param {Quaternion} target - The target Quaternion the result is stored to. * @return {Quaternion} The 3D object's rotation in world space. */ getWorldQuaternion( target ) { this.updateWorldMatrix( true, false ); this.matrixWorld.decompose( _position$3, target, _scale$2 ); return target; } /** * Returns a vector representing the scale of the 3D object in world space. * * @param {Vector3} target - The target vector the result is stored to. * @return {Vector3} The 3D object's scale in world space. */ getWorldScale( target ) { this.updateWorldMatrix( true, false ); this.matrixWorld.decompose( _position$3, _quaternion$2, target ); return target; } /** * Returns a vector representing the ("look") direction of the 3D object in world space. * * @param {Vector3} target - The target vector the result is stored to. * @return {Vector3} The 3D object's direction in world space. */ getWorldDirection( target ) { this.updateWorldMatrix( true, false ); const e = this.matrixWorld.elements; return target.set( e[ 8 ], e[ 9 ], e[ 10 ] ).normalize(); } /** * Abstract method to get intersections between a cast ray and this * 3D object. Renderable 3D objects such as {@link Mesh}, {@link Line} or {@link Points} * implement this method in order to use raycasting. * * @abstract * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - An array holding the result of the method. */ raycast( /* raycaster, intersects */ ) {} /** * Executes the callback on this 3D object and all descendants. * * Note: Modifying the scene graph inside the callback is discouraged. * * @param {Function} callback - A callback function that allows processing of the current 3D object.
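* * A small sketch that counts the meshes below an assumed `scene` object: * ```js * let meshCount = 0; * scene.traverse( ( child ) => { if ( child.isMesh ) meshCount ++; } ); * ```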
*/ traverse( callback ) { callback( this ); const children = this.children; for ( let i = 0, l = children.length; i < l; i ++ ) { children[ i ].traverse( callback ); } } /** * Like {@link Object3D#traverse}, but the callback will only be executed for visible 3D objects. * Descendants of invisible 3D objects are not traversed. * * Note: Modifying the scene graph inside the callback is discouraged. * * @param {Function} callback - A callback function that allows processing of the current 3D object. */ traverseVisible( callback ) { if ( this.visible === false ) return; callback( this ); const children = this.children; for ( let i = 0, l = children.length; i < l; i ++ ) { children[ i ].traverseVisible( callback ); } } /** * Like {@link Object3D#traverse}, but the callback will only be executed for the ancestors of this 3D object. * * Note: Modifying the scene graph inside the callback is discouraged. * * @param {Function} callback - A callback function that allows processing of the current 3D object. */ traverseAncestors( callback ) { const parent = this.parent; if ( parent !== null ) { callback( parent ); parent.traverseAncestors( callback ); } } /** * Updates the transformation matrix in local space by computing it from the current * position, rotation and scale values. */ updateMatrix() { this.matrix.compose( this.position, this.quaternion, this.scale ); this.matrixWorldNeedsUpdate = true; } /** * Updates the transformation matrix in world space of this 3D object and its descendants. * * To ensure correct results, this method also recomputes the 3D object's transformation matrix in * local space. The computation of the local and world matrix can be controlled with the * {@link Object3D#matrixAutoUpdate} and {@link Object3D#matrixWorldAutoUpdate} flags which are both * `true` by default. Set these flags to `false` if you need more control over the matrix update process. * * @param {boolean} [force=false] - When set to `true`, a recomputation of world matrices is forced even * when {@link Object3D#matrixWorldAutoUpdate} is set to `false`. */ updateMatrixWorld( force ) { if ( this.matrixAutoUpdate ) this.updateMatrix(); if ( this.matrixWorldNeedsUpdate || force ) { if ( this.matrixWorldAutoUpdate === true ) { if ( this.parent === null ) { this.matrixWorld.copy( this.matrix ); } else { this.matrixWorld.multiplyMatrices( this.parent.matrixWorld, this.matrix ); } } this.matrixWorldNeedsUpdate = false; force = true; } // make sure descendants are updated if required const children = this.children; for ( let i = 0, l = children.length; i < l; i ++ ) { const child = children[ i ]; child.updateMatrixWorld( force ); } } /** * An alternative version of {@link Object3D#updateMatrixWorld} with more control over the * update of ancestor and descendant nodes. * * @param {boolean} [updateParents=false] - Whether ancestor nodes should be updated or not. * @param {boolean} [updateChildren=false] - Whether descendant nodes should be updated or not.
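* * For example, to read an up-to-date world matrix outside the render loop, ancestors can be updated without touching descendants (an arbitrary `mesh` is assumed): * ```js * mesh.updateWorldMatrix( true, false ); * const worldPosition = new THREE.Vector3().setFromMatrixPosition( mesh.matrixWorld ); * ```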
*/ updateWorldMatrix( updateParents, updateChildren ) { const parent = this.parent; if ( updateParents === true && parent !== null ) { parent.updateWorldMatrix( true, false ); } if ( this.matrixAutoUpdate ) this.updateMatrix(); if ( this.matrixWorldAutoUpdate === true ) { if ( this.parent === null ) { this.matrixWorld.copy( this.matrix ); } else { this.matrixWorld.multiplyMatrices( this.parent.matrixWorld, this.matrix ); } } // make sure descendants are updated if ( updateChildren === true ) { const children = this.children; for ( let i = 0, l = children.length; i < l; i ++ ) { const child = children[ i ]; child.updateWorldMatrix( false, true ); } } } /** * Serializes the 3D object into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized 3D object. * @see {@link ObjectLoader#parse} */ toJSON( meta ) { // meta is a string when called from JSON.stringify const isRootObject = ( meta === undefined || typeof meta === 'string' ); const output = {}; // meta is a hash used to collect geometries, materials. // not providing it implies that this is the root object // being serialized. if ( isRootObject ) { // initialize meta obj meta = { geometries: {}, materials: {}, textures: {}, images: {}, shapes: {}, skeletons: {}, animations: {}, nodes: {} }; output.metadata = { version: 4.6, type: 'Object', generator: 'Object3D.toJSON' }; } // standard Object3D serialization const object = {}; object.uuid = this.uuid; object.type = this.type; if ( this.name !== '' ) object.name = this.name; if ( this.castShadow === true ) object.castShadow = true; if ( this.receiveShadow === true ) object.receiveShadow = true; if ( this.visible === false ) object.visible = false; if ( this.frustumCulled === false ) object.frustumCulled = false; if ( this.renderOrder !== 0 ) object.renderOrder = this.renderOrder; if ( Object.keys( this.userData ).length > 0 ) object.userData = this.userData; object.layers = this.layers.mask; object.matrix = this.matrix.toArray(); object.up = this.up.toArray(); if ( this.matrixAutoUpdate === false ) object.matrixAutoUpdate = false; // object specific properties if ( this.isInstancedMesh ) { object.type = 'InstancedMesh'; object.count = this.count; object.instanceMatrix = this.instanceMatrix.toJSON(); if ( this.instanceColor !== null ) object.instanceColor = this.instanceColor.toJSON(); } if ( this.isBatchedMesh ) { object.type = 'BatchedMesh'; object.perObjectFrustumCulled = this.perObjectFrustumCulled; object.sortObjects = this.sortObjects; object.drawRanges = this._drawRanges; object.reservedRanges = this._reservedRanges; object.visibility = this._visibility; object.active = this._active; object.bounds = this._bounds.map( bound => ( { boxInitialized: bound.boxInitialized, boxMin: bound.box.min.toArray(), boxMax: bound.box.max.toArray(), sphereInitialized: bound.sphereInitialized, sphereRadius: bound.sphere.radius, sphereCenter: bound.sphere.center.toArray() } ) ); object.maxInstanceCount = this._maxInstanceCount; object.maxVertexCount = this._maxVertexCount; object.maxIndexCount = this._maxIndexCount; object.geometryInitialized = this._geometryInitialized; object.geometryCount = this._geometryCount; object.matricesTexture = this._matricesTexture.toJSON( meta ); if ( this._colorsTexture !== null ) object.colorsTexture = this._colorsTexture.toJSON( meta ); if ( this.boundingSphere !== null ) { object.boundingSphere = { center: object.boundingSphere.center.toArray(), radius: 
object.boundingSphere.radius }; } if ( this.boundingBox !== null ) { object.boundingBox = { min: object.boundingBox.min.toArray(), max: object.boundingBox.max.toArray() }; } } // function serialize( library, element ) { if ( library[ element.uuid ] === undefined ) { library[ element.uuid ] = element.toJSON( meta ); } return element.uuid; } if ( this.isScene ) { if ( this.background ) { if ( this.background.isColor ) { object.background = this.background.toJSON(); } else if ( this.background.isTexture ) { object.background = this.background.toJSON( meta ).uuid; } } if ( this.environment && this.environment.isTexture && this.environment.isRenderTargetTexture !== true ) { object.environment = this.environment.toJSON( meta ).uuid; } } else if ( this.isMesh || this.isLine || this.isPoints ) { object.geometry = serialize( meta.geometries, this.geometry ); const parameters = this.geometry.parameters; if ( parameters !== undefined && parameters.shapes !== undefined ) { const shapes = parameters.shapes; if ( Array.isArray( shapes ) ) { for ( let i = 0, l = shapes.length; i < l; i ++ ) { const shape = shapes[ i ]; serialize( meta.shapes, shape ); } } else { serialize( meta.shapes, shapes ); } } } if ( this.isSkinnedMesh ) { object.bindMode = this.bindMode; object.bindMatrix = this.bindMatrix.toArray(); if ( this.skeleton !== undefined ) { serialize( meta.skeletons, this.skeleton ); object.skeleton = this.skeleton.uuid; } } if ( this.material !== undefined ) { if ( Array.isArray( this.material ) ) { const uuids = []; for ( let i = 0, l = this.material.length; i < l; i ++ ) { uuids.push( serialize( meta.materials, this.material[ i ] ) ); } object.material = uuids; } else { object.material = serialize( meta.materials, this.material ); } } // if ( this.children.length > 0 ) { object.children = []; for ( let i = 0; i < this.children.length; i ++ ) { object.children.push( this.children[ i ].toJSON( meta ).object ); } } // if ( this.animations.length > 0 ) { object.animations = []; for ( let i = 0; i < this.animations.length; i ++ ) { const animation = this.animations[ i ]; object.animations.push( serialize( meta.animations, animation ) ); } } if ( isRootObject ) { const geometries = extractFromCache( meta.geometries ); const materials = extractFromCache( meta.materials ); const textures = extractFromCache( meta.textures ); const images = extractFromCache( meta.images ); const shapes = extractFromCache( meta.shapes ); const skeletons = extractFromCache( meta.skeletons ); const animations = extractFromCache( meta.animations ); const nodes = extractFromCache( meta.nodes ); if ( geometries.length > 0 ) output.geometries = geometries; if ( materials.length > 0 ) output.materials = materials; if ( textures.length > 0 ) output.textures = textures; if ( images.length > 0 ) output.images = images; if ( shapes.length > 0 ) output.shapes = shapes; if ( skeletons.length > 0 ) output.skeletons = skeletons; if ( animations.length > 0 ) output.animations = animations; if ( nodes.length > 0 ) output.nodes = nodes; } output.object = object; return output; // extract data from the cache hash // remove metadata on each item // and return as array function extractFromCache( cache ) { const values = []; for ( const key in cache ) { const data = cache[ key ]; delete data.metadata; values.push( data ); } return values; } } /** * Returns a new 3D object with copied values from this instance. * * @param {boolean} [recursive=true] - When set to `true`, descendants of the 3D object are also cloned. 
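* * A minimal sketch (assumes an existing `mesh` and `scene`): * ```js * const copy = mesh.clone(); // descendants are cloned as well by default * copy.position.x += 2; * scene.add( copy ); * ```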
* @return {Object3D} A clone of this instance. */ clone( recursive ) { return new this.constructor().copy( this, recursive ); } /** * Copies the values of the given 3D object to this instance. * * @param {Object3D} source - The 3D object to copy. * @param {boolean} [recursive=true] - When set to `true`, descendants of the 3D object are cloned. * @return {Object3D} A reference to this instance. */ copy( source, recursive = true ) { this.name = source.name; this.up.copy( source.up ); this.position.copy( source.position ); this.rotation.order = source.rotation.order; this.quaternion.copy( source.quaternion ); this.scale.copy( source.scale ); this.matrix.copy( source.matrix ); this.matrixWorld.copy( source.matrixWorld ); this.matrixAutoUpdate = source.matrixAutoUpdate; this.matrixWorldAutoUpdate = source.matrixWorldAutoUpdate; this.matrixWorldNeedsUpdate = source.matrixWorldNeedsUpdate; this.layers.mask = source.layers.mask; this.visible = source.visible; this.castShadow = source.castShadow; this.receiveShadow = source.receiveShadow; this.frustumCulled = source.frustumCulled; this.renderOrder = source.renderOrder; this.animations = source.animations.slice(); this.userData = JSON.parse( JSON.stringify( source.userData ) ); if ( recursive === true ) { for ( let i = 0; i < source.children.length; i ++ ) { const child = source.children[ i ]; this.add( child.clone() ); } } return this; } } /** * The default up direction for objects, also used as the default * position for {@link DirectionalLight} and {@link HemisphereLight}. * * @static * @type {Vector3} * @default (0,1,0) */ Object3D.DEFAULT_UP = /*@__PURE__*/ new Vector3( 0, 1, 0 ); /** * The default setting for {@link Object3D#matrixAutoUpdate} for * newly created 3D objects. * * @static * @type {boolean} * @default true */ Object3D.DEFAULT_MATRIX_AUTO_UPDATE = true; /** * The default setting for {@link Object3D#matrixWorldAutoUpdate} for * newly created 3D objects. * * @static * @type {boolean} * @default true */ Object3D.DEFAULT_MATRIX_WORLD_AUTO_UPDATE = true; const _v0$2 = /*@__PURE__*/ new Vector3(); const _v1$3 = /*@__PURE__*/ new Vector3(); const _v2$2 = /*@__PURE__*/ new Vector3(); const _v3$2 = /*@__PURE__*/ new Vector3(); const _vab = /*@__PURE__*/ new Vector3(); const _vac = /*@__PURE__*/ new Vector3(); const _vbc = /*@__PURE__*/ new Vector3(); const _vap = /*@__PURE__*/ new Vector3(); const _vbp = /*@__PURE__*/ new Vector3(); const _vcp = /*@__PURE__*/ new Vector3(); const _v40 = /*@__PURE__*/ new Vector4(); const _v41 = /*@__PURE__*/ new Vector4(); const _v42 = /*@__PURE__*/ new Vector4(); /** * A geometric triangle as defined by three vectors representing its three corners. */ class Triangle { /** * Constructs a new triangle. * * @param {Vector3} [a=(0,0,0)] - The first corner of the triangle. * @param {Vector3} [b=(0,0,0)] - The second corner of the triangle. * @param {Vector3} [c=(0,0,0)] - The third corner of the triangle. */ constructor( a = new Vector3(), b = new Vector3(), c = new Vector3() ) { /** * The first corner of the triangle. * * @type {Vector3} */ this.a = a; /** * The second corner of the triangle. * * @type {Vector3} */ this.b = b; /** * The third corner of the triangle. * * @type {Vector3} */ this.c = c; } /** * Computes the normal vector of a triangle. * * @param {Vector3} a - The first corner of the triangle. * @param {Vector3} b - The second corner of the triangle. * @param {Vector3} c - The third corner of the triangle. * @param {Vector3} target - The target vector that is used to store the method's result. 
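* * A minimal sketch using three arbitrary corner points: * ```js * const normal = new THREE.Vector3(); * THREE.Triangle.getNormal( new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 1, 0, 0 ), new THREE.Vector3( 0, 1, 0 ), normal ); // normal is now (0, 0, 1) * ```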
* @return {Vector3} The triangle's normal. */ static getNormal( a, b, c, target ) { target.subVectors( c, b ); _v0$2.subVectors( a, b ); target.cross( _v0$2 ); const targetLengthSq = target.lengthSq(); if ( targetLengthSq > 0 ) { return target.multiplyScalar( 1 / Math.sqrt( targetLengthSq ) ); } return target.set( 0, 0, 0 ); } /** * Computes a barycentric coordinates from the given vector. * Returns `null` if the triangle is degenerate. * * @param {Vector3} point - A point in 3D space. * @param {Vector3} a - The first corner of the triangle. * @param {Vector3} b - The second corner of the triangle. * @param {Vector3} c - The third corner of the triangle. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The barycentric coordinates for the given point */ static getBarycoord( point, a, b, c, target ) { // based on: http://www.blackpawn.com/texts/pointinpoly/default.html _v0$2.subVectors( c, a ); _v1$3.subVectors( b, a ); _v2$2.subVectors( point, a ); const dot00 = _v0$2.dot( _v0$2 ); const dot01 = _v0$2.dot( _v1$3 ); const dot02 = _v0$2.dot( _v2$2 ); const dot11 = _v1$3.dot( _v1$3 ); const dot12 = _v1$3.dot( _v2$2 ); const denom = ( dot00 * dot11 - dot01 * dot01 ); // collinear or singular triangle if ( denom === 0 ) { target.set( 0, 0, 0 ); return null; } const invDenom = 1 / denom; const u = ( dot11 * dot02 - dot01 * dot12 ) * invDenom; const v = ( dot00 * dot12 - dot01 * dot02 ) * invDenom; // barycentric coordinates must always sum to 1 return target.set( 1 - u - v, v, u ); } /** * Returns `true` if the given point, when projected onto the plane of the * triangle, lies within the triangle. * * @param {Vector3} point - The point in 3D space to test. * @param {Vector3} a - The first corner of the triangle. * @param {Vector3} b - The second corner of the triangle. * @param {Vector3} c - The third corner of the triangle. * @return {boolean} Whether the given point, when projected onto the plane of the * triangle, lies within the triangle or not. */ static containsPoint( point, a, b, c ) { // if the triangle is degenerate then we can't contain a point if ( this.getBarycoord( point, a, b, c, _v3$2 ) === null ) { return false; } return ( _v3$2.x >= 0 ) && ( _v3$2.y >= 0 ) && ( ( _v3$2.x + _v3$2.y ) <= 1 ); } /** * Computes the value barycentrically interpolated for the given point on the * triangle. Returns `null` if the triangle is degenerate. * * @param {Vector3} point - Position of interpolated point. * @param {Vector3} p1 - The first corner of the triangle. * @param {Vector3} p2 - The second corner of the triangle. * @param {Vector3} p3 - The third corner of the triangle. * @param {Vector3} v1 - Value to interpolate of first vertex. * @param {Vector3} v2 - Value to interpolate of second vertex. * @param {Vector3} v3 - Value to interpolate of third vertex. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The interpolated value. */ static getInterpolation( point, p1, p2, p3, v1, v2, v3, target ) { if ( this.getBarycoord( point, p1, p2, p3, _v3$2 ) === null ) { target.x = 0; target.y = 0; if ( 'z' in target ) target.z = 0; if ( 'w' in target ) target.w = 0; return null; } target.setScalar( 0 ); target.addScaledVector( v1, _v3$2.x ); target.addScaledVector( v2, _v3$2.y ); target.addScaledVector( v3, _v3$2.z ); return target; } /** * Computes the value barycentrically interpolated for the given attribute and indices. 
* * @param {BufferAttribute} attr - The attribute to interpolate. * @param {number} i1 - Index of first vertex. * @param {number} i2 - Index of second vertex. * @param {number} i3 - Index of third vertex. * @param {Vector3} barycoord - The barycoordinate value to use to interpolate. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The interpolated attribute value. */ static getInterpolatedAttribute( attr, i1, i2, i3, barycoord, target ) { _v40.setScalar( 0 ); _v41.setScalar( 0 ); _v42.setScalar( 0 ); _v40.fromBufferAttribute( attr, i1 ); _v41.fromBufferAttribute( attr, i2 ); _v42.fromBufferAttribute( attr, i3 ); target.setScalar( 0 ); target.addScaledVector( _v40, barycoord.x ); target.addScaledVector( _v41, barycoord.y ); target.addScaledVector( _v42, barycoord.z ); return target; } /** * Returns `true` if the triangle is oriented towards the given direction. * * @param {Vector3} a - The first corner of the triangle. * @param {Vector3} b - The second corner of the triangle. * @param {Vector3} c - The third corner of the triangle. * @param {Vector3} direction - The (normalized) direction vector. * @return {boolean} Whether the triangle is oriented towards the given direction or not. */ static isFrontFacing( a, b, c, direction ) { _v0$2.subVectors( c, b ); _v1$3.subVectors( a, b ); // strictly front facing return ( _v0$2.cross( _v1$3 ).dot( direction ) < 0 ) ? true : false; } /** * Sets the triangle's vertices by copying the given values. * * @param {Vector3} a - The first corner of the triangle. * @param {Vector3} b - The second corner of the triangle. * @param {Vector3} c - The third corner of the triangle. * @return {Triangle} A reference to this triangle. */ set( a, b, c ) { this.a.copy( a ); this.b.copy( b ); this.c.copy( c ); return this; } /** * Sets the triangle's vertices by copying the given array values. * * @param {Array} points - An array with 3D points. * @param {number} i0 - The array index representing the first corner of the triangle. * @param {number} i1 - The array index representing the second corner of the triangle. * @param {number} i2 - The array index representing the third corner of the triangle. * @return {Triangle} A reference to this triangle. */ setFromPointsAndIndices( points, i0, i1, i2 ) { this.a.copy( points[ i0 ] ); this.b.copy( points[ i1 ] ); this.c.copy( points[ i2 ] ); return this; } /** * Sets the triangle's vertices by copying the given attribute values. * * @param {BufferAttribute} attribute - A buffer attribute with 3D points data. * @param {number} i0 - The attribute index representing the first corner of the triangle. * @param {number} i1 - The attribute index representing the second corner of the triangle. * @param {number} i2 - The attribute index representing the third corner of the triangle. * @return {Triangle} A reference to this triangle. */ setFromAttributeAndIndices( attribute, i0, i1, i2 ) { this.a.fromBufferAttribute( attribute, i0 ); this.b.fromBufferAttribute( attribute, i1 ); this.c.fromBufferAttribute( attribute, i2 ); return this; } /** * Returns a new triangle with copied values from this instance. * * @return {Triangle} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given triangle to this instance. * * @param {Triangle} triangle - The triangle to copy. * @return {Triangle} A reference to this triangle. 
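* * A minimal sketch: * ```js * const source = new THREE.Triangle( new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 1, 0, 0 ), new THREE.Vector3( 0, 1, 0 ) ); * const backup = new THREE.Triangle().copy( source ); // corner values are copied, not referenced * ```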
*/ copy( triangle ) { this.a.copy( triangle.a ); this.b.copy( triangle.b ); this.c.copy( triangle.c ); return this; } /** * Computes the area of the triangle. * * @return {number} The triangle's area. */ getArea() { _v0$2.subVectors( this.c, this.b ); _v1$3.subVectors( this.a, this.b ); return _v0$2.cross( _v1$3 ).length() * 0.5; } /** * Computes the midpoint of the triangle. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The triangle's midpoint. */ getMidpoint( target ) { return target.addVectors( this.a, this.b ).add( this.c ).multiplyScalar( 1 / 3 ); } /** * Computes the normal of the triangle. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The triangle's normal. */ getNormal( target ) { return Triangle.getNormal( this.a, this.b, this.c, target ); } /** * Computes a plane the triangle lies within. * * @param {Plane} target - The target vector that is used to store the method's result. * @return {Plane} The plane the triangle lies within. */ getPlane( target ) { return target.setFromCoplanarPoints( this.a, this.b, this.c ); } /** * Computes a barycentric coordinates from the given vector. * Returns `null` if the triangle is degenerate. * * @param {Vector3} point - A point in 3D space. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The barycentric coordinates for the given point */ getBarycoord( point, target ) { return Triangle.getBarycoord( point, this.a, this.b, this.c, target ); } /** * Computes the value barycentrically interpolated for the given point on the * triangle. Returns `null` if the triangle is degenerate. * * @param {Vector3} point - Position of interpolated point. * @param {Vector3} v1 - Value to interpolate of first vertex. * @param {Vector3} v2 - Value to interpolate of second vertex. * @param {Vector3} v3 - Value to interpolate of third vertex. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The interpolated value. */ getInterpolation( point, v1, v2, v3, target ) { return Triangle.getInterpolation( point, this.a, this.b, this.c, v1, v2, v3, target ); } /** * Returns `true` if the given point, when projected onto the plane of the * triangle, lies within the triangle. * * @param {Vector3} point - The point in 3D space to test. * @return {boolean} Whether the given point, when projected onto the plane of the * triangle, lies within the triangle or not. */ containsPoint( point ) { return Triangle.containsPoint( point, this.a, this.b, this.c ); } /** * Returns `true` if the triangle is oriented towards the given direction. * * @param {Vector3} direction - The (normalized) direction vector. * @return {boolean} Whether the triangle is oriented towards the given direction or not. */ isFrontFacing( direction ) { return Triangle.isFrontFacing( this.a, this.b, this.c, direction ); } /** * Returns `true` if this triangle intersects with the given box. * * @param {Box3} box - The box to intersect. * @return {boolean} Whether this triangle intersects with the given box or not. */ intersectsBox( box ) { return box.intersectsTriangle( this ); } /** * Returns the closest point on the triangle to the given point. * * @param {Vector3} p - The point to compute the closest point for. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The closest point on the triangle. 
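* * For example, clamping an arbitrary point onto the triangle (hypothetical `tri` and `point` values are assumed): * ```js * const closest = new THREE.Vector3(); * tri.closestPointToPoint( point, closest ); * const distance = closest.distanceTo( point ); * ```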
*/ closestPointToPoint( p, target ) { const a = this.a, b = this.b, c = this.c; let v, w; // algorithm thanks to Real-Time Collision Detection by Christer Ericson, // published by Morgan Kaufmann Publishers, (c) 2005 Elsevier Inc., // under the accompanying license; see chapter 5.1.5 for detailed explanation. // basically, we're distinguishing which of the voronoi regions of the triangle // the point lies in with the minimum amount of redundant computation. _vab.subVectors( b, a ); _vac.subVectors( c, a ); _vap.subVectors( p, a ); const d1 = _vab.dot( _vap ); const d2 = _vac.dot( _vap ); if ( d1 <= 0 && d2 <= 0 ) { // vertex region of A; barycentric coords (1, 0, 0) return target.copy( a ); } _vbp.subVectors( p, b ); const d3 = _vab.dot( _vbp ); const d4 = _vac.dot( _vbp ); if ( d3 >= 0 && d4 <= d3 ) { // vertex region of B; barycentric coords (0, 1, 0) return target.copy( b ); } const vc = d1 * d4 - d3 * d2; if ( vc <= 0 && d1 >= 0 && d3 <= 0 ) { v = d1 / ( d1 - d3 ); // edge region of AB; barycentric coords (1-v, v, 0) return target.copy( a ).addScaledVector( _vab, v ); } _vcp.subVectors( p, c ); const d5 = _vab.dot( _vcp ); const d6 = _vac.dot( _vcp ); if ( d6 >= 0 && d5 <= d6 ) { // vertex region of C; barycentric coords (0, 0, 1) return target.copy( c ); } const vb = d5 * d2 - d1 * d6; if ( vb <= 0 && d2 >= 0 && d6 <= 0 ) { w = d2 / ( d2 - d6 ); // edge region of AC; barycentric coords (1-w, 0, w) return target.copy( a ).addScaledVector( _vac, w ); } const va = d3 * d6 - d5 * d4; if ( va <= 0 && ( d4 - d3 ) >= 0 && ( d5 - d6 ) >= 0 ) { _vbc.subVectors( c, b ); w = ( d4 - d3 ) / ( ( d4 - d3 ) + ( d5 - d6 ) ); // edge region of BC; barycentric coords (0, 1-w, w) return target.copy( b ).addScaledVector( _vbc, w ); // edge region of BC } // face region const denom = 1 / ( va + vb + vc ); // u = va * denom v = vb * denom; w = vc * denom; return target.copy( a ).addScaledVector( _vab, v ).addScaledVector( _vac, w ); } /** * Returns `true` if this triangle is equal with the given one. * * @param {Triangle} triangle - The triangle to test for equality. * @return {boolean} Whether this triangle is equal with the given one. 
*/ equals( triangle ) { return triangle.a.equals( this.a ) && triangle.b.equals( this.b ) && triangle.c.equals( this.c ); } } const _colorKeywords = { 'aliceblue': 0xF0F8FF, 'antiquewhite': 0xFAEBD7, 'aqua': 0x00FFFF, 'aquamarine': 0x7FFFD4, 'azure': 0xF0FFFF, 'beige': 0xF5F5DC, 'bisque': 0xFFE4C4, 'black': 0x000000, 'blanchedalmond': 0xFFEBCD, 'blue': 0x0000FF, 'blueviolet': 0x8A2BE2, 'brown': 0xA52A2A, 'burlywood': 0xDEB887, 'cadetblue': 0x5F9EA0, 'chartreuse': 0x7FFF00, 'chocolate': 0xD2691E, 'coral': 0xFF7F50, 'cornflowerblue': 0x6495ED, 'cornsilk': 0xFFF8DC, 'crimson': 0xDC143C, 'cyan': 0x00FFFF, 'darkblue': 0x00008B, 'darkcyan': 0x008B8B, 'darkgoldenrod': 0xB8860B, 'darkgray': 0xA9A9A9, 'darkgreen': 0x006400, 'darkgrey': 0xA9A9A9, 'darkkhaki': 0xBDB76B, 'darkmagenta': 0x8B008B, 'darkolivegreen': 0x556B2F, 'darkorange': 0xFF8C00, 'darkorchid': 0x9932CC, 'darkred': 0x8B0000, 'darksalmon': 0xE9967A, 'darkseagreen': 0x8FBC8F, 'darkslateblue': 0x483D8B, 'darkslategray': 0x2F4F4F, 'darkslategrey': 0x2F4F4F, 'darkturquoise': 0x00CED1, 'darkviolet': 0x9400D3, 'deeppink': 0xFF1493, 'deepskyblue': 0x00BFFF, 'dimgray': 0x696969, 'dimgrey': 0x696969, 'dodgerblue': 0x1E90FF, 'firebrick': 0xB22222, 'floralwhite': 0xFFFAF0, 'forestgreen': 0x228B22, 'fuchsia': 0xFF00FF, 'gainsboro': 0xDCDCDC, 'ghostwhite': 0xF8F8FF, 'gold': 0xFFD700, 'goldenrod': 0xDAA520, 'gray': 0x808080, 'green': 0x008000, 'greenyellow': 0xADFF2F, 'grey': 0x808080, 'honeydew': 0xF0FFF0, 'hotpink': 0xFF69B4, 'indianred': 0xCD5C5C, 'indigo': 0x4B0082, 'ivory': 0xFFFFF0, 'khaki': 0xF0E68C, 'lavender': 0xE6E6FA, 'lavenderblush': 0xFFF0F5, 'lawngreen': 0x7CFC00, 'lemonchiffon': 0xFFFACD, 'lightblue': 0xADD8E6, 'lightcoral': 0xF08080, 'lightcyan': 0xE0FFFF, 'lightgoldenrodyellow': 0xFAFAD2, 'lightgray': 0xD3D3D3, 'lightgreen': 0x90EE90, 'lightgrey': 0xD3D3D3, 'lightpink': 0xFFB6C1, 'lightsalmon': 0xFFA07A, 'lightseagreen': 0x20B2AA, 'lightskyblue': 0x87CEFA, 'lightslategray': 0x778899, 'lightslategrey': 0x778899, 'lightsteelblue': 0xB0C4DE, 'lightyellow': 0xFFFFE0, 'lime': 0x00FF00, 'limegreen': 0x32CD32, 'linen': 0xFAF0E6, 'magenta': 0xFF00FF, 'maroon': 0x800000, 'mediumaquamarine': 0x66CDAA, 'mediumblue': 0x0000CD, 'mediumorchid': 0xBA55D3, 'mediumpurple': 0x9370DB, 'mediumseagreen': 0x3CB371, 'mediumslateblue': 0x7B68EE, 'mediumspringgreen': 0x00FA9A, 'mediumturquoise': 0x48D1CC, 'mediumvioletred': 0xC71585, 'midnightblue': 0x191970, 'mintcream': 0xF5FFFA, 'mistyrose': 0xFFE4E1, 'moccasin': 0xFFE4B5, 'navajowhite': 0xFFDEAD, 'navy': 0x000080, 'oldlace': 0xFDF5E6, 'olive': 0x808000, 'olivedrab': 0x6B8E23, 'orange': 0xFFA500, 'orangered': 0xFF4500, 'orchid': 0xDA70D6, 'palegoldenrod': 0xEEE8AA, 'palegreen': 0x98FB98, 'paleturquoise': 0xAFEEEE, 'palevioletred': 0xDB7093, 'papayawhip': 0xFFEFD5, 'peachpuff': 0xFFDAB9, 'peru': 0xCD853F, 'pink': 0xFFC0CB, 'plum': 0xDDA0DD, 'powderblue': 0xB0E0E6, 'purple': 0x800080, 'rebeccapurple': 0x663399, 'red': 0xFF0000, 'rosybrown': 0xBC8F8F, 'royalblue': 0x4169E1, 'saddlebrown': 0x8B4513, 'salmon': 0xFA8072, 'sandybrown': 0xF4A460, 'seagreen': 0x2E8B57, 'seashell': 0xFFF5EE, 'sienna': 0xA0522D, 'silver': 0xC0C0C0, 'skyblue': 0x87CEEB, 'slateblue': 0x6A5ACD, 'slategray': 0x708090, 'slategrey': 0x708090, 'snow': 0xFFFAFA, 'springgreen': 0x00FF7F, 'steelblue': 0x4682B4, 'tan': 0xD2B48C, 'teal': 0x008080, 'thistle': 0xD8BFD8, 'tomato': 0xFF6347, 'turquoise': 0x40E0D0, 'violet': 0xEE82EE, 'wheat': 0xF5DEB3, 'white': 0xFFFFFF, 'whitesmoke': 0xF5F5F5, 'yellow': 0xFFFF00, 'yellowgreen': 0x9ACD32 }; const 
_hslA = { h: 0, s: 0, l: 0 }; const _hslB = { h: 0, s: 0, l: 0 }; function hue2rgb( p, q, t ) { if ( t < 0 ) t += 1; if ( t > 1 ) t -= 1; if ( t < 1 / 6 ) return p + ( q - p ) * 6 * t; if ( t < 1 / 2 ) return q; if ( t < 2 / 3 ) return p + ( q - p ) * 6 * ( 2 / 3 - t ); return p; } /** * A Color instance is represented by RGB components in the linear working * color space, which defaults to `LinearSRGBColorSpace`. Inputs * conventionally using `SRGBColorSpace` (such as hexadecimals and CSS * strings) are converted to the working color space automatically. * * ```js * // converted automatically from SRGBColorSpace to LinearSRGBColorSpace * const color = new THREE.Color().setHex( 0x112233 ); * ``` * Source color spaces may be specified explicitly, to ensure correct conversions. * ```js * // assumed already LinearSRGBColorSpace; no conversion * const color = new THREE.Color().setRGB( 0.5, 0.5, 0.5 ); * * // converted explicitly from SRGBColorSpace to LinearSRGBColorSpace * const color = new THREE.Color().setRGB( 0.5, 0.5, 0.5, SRGBColorSpace ); * ``` * If THREE.ColorManagement is disabled, no conversions occur. For details, * see Color management. Iterating through a Color instance will yield * its components (r, g, b) in the corresponding order. A Color can be initialised * in any of the following ways: * ```js * //empty constructor - will default white * const color1 = new THREE.Color(); * * //Hexadecimal color (recommended) * const color2 = new THREE.Color( 0xff0000 ); * * //RGB string * const color3 = new THREE.Color("rgb(255, 0, 0)"); * const color4 = new THREE.Color("rgb(100%, 0%, 0%)"); * * //X11 color name - all 140 color names are supported. * //Note the lack of CamelCase in the name * const color5 = new THREE.Color( 'skyblue' ); * //HSL string * const color6 = new THREE.Color("hsl(0, 100%, 50%)"); * * //Separate RGB values between 0 and 1 * const color7 = new THREE.Color( 1, 0, 0 ); * ``` */ class Color { /** * Constructs a new color. * * Note that standard method of specifying color in three.js is with a hexadecimal triplet, * and that method is used throughout the rest of the documentation. * * @param {(number|string|Color)} [r] - The red component of the color. If `g` and `b` are * not provided, it can be hexadecimal triplet, a CSS-style string or another `Color` instance. * @param {number} [g] - The green component. * @param {number} [b] - The blue component. */ constructor( r, g, b ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isColor = true; /** * The red component. * * @type {number} * @default 1 */ this.r = 1; /** * The green component. * * @type {number} * @default 1 */ this.g = 1; /** * The blue component. * * @type {number} * @default 1 */ this.b = 1; return this.set( r, g, b ); } /** * Sets the colors's components from the given values. * * @param {(number|string|Color)} [r] - The red component of the color. If `g` and `b` are * not provided, it can be hexadecimal triplet, a CSS-style string or another `Color` instance. * @param {number} [g] - The green component. * @param {number} [b] - The blue component. * @return {Color} A reference to this color. 
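* * All of the constructor-style inputs are accepted, for example: * ```js * const color = new THREE.Color(); * color.set( 0xff0000 ); // hexadecimal * color.set( 'skyblue' ); // X11 color name * color.set( 1, 0, 0 ); // separate RGB components * ```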
*/ set( r, g, b ) { if ( g === undefined && b === undefined ) { // r is THREE.Color, hex or string const value = r; if ( value && value.isColor ) { this.copy( value ); } else if ( typeof value === 'number' ) { this.setHex( value ); } else if ( typeof value === 'string' ) { this.setStyle( value ); } } else { this.setRGB( r, g, b ); } return this; } /** * Sets the color's components to the given scalar value. * * @param {number} scalar - The scalar value. * @return {Color} A reference to this color. */ setScalar( scalar ) { this.r = scalar; this.g = scalar; this.b = scalar; return this; } /** * Sets this color from a hexadecimal value. * * @param {number} hex - The hexadecimal value. * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {Color} A reference to this color. */ setHex( hex, colorSpace = SRGBColorSpace ) { hex = Math.floor( hex ); this.r = ( hex >> 16 & 255 ) / 255; this.g = ( hex >> 8 & 255 ) / 255; this.b = ( hex & 255 ) / 255; ColorManagement.toWorkingColorSpace( this, colorSpace ); return this; } /** * Sets this color from RGB values. * * @param {number} r - Red channel value between `0.0` and `1.0`. * @param {number} g - Green channel value between `0.0` and `1.0`. * @param {number} b - Blue channel value between `0.0` and `1.0`. * @param {string} [colorSpace=ColorManagement.workingColorSpace] - The color space. * @return {Color} A reference to this color. */ setRGB( r, g, b, colorSpace = ColorManagement.workingColorSpace ) { this.r = r; this.g = g; this.b = b; ColorManagement.toWorkingColorSpace( this, colorSpace ); return this; } /** * Sets this color from HSL values. * * @param {number} h - Hue value between `0.0` and `1.0`. * @param {number} s - Saturation value between `0.0` and `1.0`. * @param {number} l - Lightness value between `0.0` and `1.0`. * @param {string} [colorSpace=ColorManagement.workingColorSpace] - The color space. * @return {Color} A reference to this color. */ setHSL( h, s, l, colorSpace = ColorManagement.workingColorSpace ) { // h,s,l ranges are in 0.0 - 1.0 h = euclideanModulo( h, 1 ); s = clamp( s, 0, 1 ); l = clamp( l, 0, 1 ); if ( s === 0 ) { this.r = this.g = this.b = l; } else { const p = l <= 0.5 ? l * ( 1 + s ) : l + s - ( l * s ); const q = ( 2 * l ) - p; this.r = hue2rgb( q, p, h + 1 / 3 ); this.g = hue2rgb( q, p, h ); this.b = hue2rgb( q, p, h - 1 / 3 ); } ColorManagement.toWorkingColorSpace( this, colorSpace ); return this; } /** * Sets this color from a CSS-style string. For example, `rgb(250, 0, 0)`, * `rgb(100%, 0%, 0%)`, `hsl(0, 100%, 50%)`, `#ff0000`, `#f00`, or `red` ( or * any [X11 color name]{@link https://en.wikipedia.org/wiki/X11_color_names#Color_name_chart} - * all 140 color names are supported). * * @param {string} style - Color as a CSS-style string. * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {Color} A reference to this color. */ setStyle( style, colorSpace = SRGBColorSpace ) { function handleAlpha( string ) { if ( string === undefined ) return; if ( parseFloat( string ) < 1 ) { console.warn( 'THREE.Color: Alpha component of ' + style + ' will be ignored.'
); } } let m; if ( m = /^(\w+)\(([^\)]*)\)/.exec( style ) ) { // rgb / hsl let color; const name = m[ 1 ]; const components = m[ 2 ]; switch ( name ) { case 'rgb': case 'rgba': if ( color = /^\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) { // rgb(255,0,0) rgba(255,0,0,0.5) handleAlpha( color[ 4 ] ); return this.setRGB( Math.min( 255, parseInt( color[ 1 ], 10 ) ) / 255, Math.min( 255, parseInt( color[ 2 ], 10 ) ) / 255, Math.min( 255, parseInt( color[ 3 ], 10 ) ) / 255, colorSpace ); } if ( color = /^\s*(\d+)\%\s*,\s*(\d+)\%\s*,\s*(\d+)\%\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) { // rgb(100%,0%,0%) rgba(100%,0%,0%,0.5) handleAlpha( color[ 4 ] ); return this.setRGB( Math.min( 100, parseInt( color[ 1 ], 10 ) ) / 100, Math.min( 100, parseInt( color[ 2 ], 10 ) ) / 100, Math.min( 100, parseInt( color[ 3 ], 10 ) ) / 100, colorSpace ); } break; case 'hsl': case 'hsla': if ( color = /^\s*(\d*\.?\d+)\s*,\s*(\d*\.?\d+)\%\s*,\s*(\d*\.?\d+)\%\s*(?:,\s*(\d*\.?\d+)\s*)?$/.exec( components ) ) { // hsl(120,50%,50%) hsla(120,50%,50%,0.5) handleAlpha( color[ 4 ] ); return this.setHSL( parseFloat( color[ 1 ] ) / 360, parseFloat( color[ 2 ] ) / 100, parseFloat( color[ 3 ] ) / 100, colorSpace ); } break; default: console.warn( 'THREE.Color: Unknown color model ' + style ); } } else if ( m = /^\#([A-Fa-f\d]+)$/.exec( style ) ) { // hex color const hex = m[ 1 ]; const size = hex.length; if ( size === 3 ) { // #ff0 return this.setRGB( parseInt( hex.charAt( 0 ), 16 ) / 15, parseInt( hex.charAt( 1 ), 16 ) / 15, parseInt( hex.charAt( 2 ), 16 ) / 15, colorSpace ); } else if ( size === 6 ) { // #ff0000 return this.setHex( parseInt( hex, 16 ), colorSpace ); } else { console.warn( 'THREE.Color: Invalid hex color ' + style ); } } else if ( style && style.length > 0 ) { return this.setColorName( style, colorSpace ); } return this; } /** * Sets this color from a color name. Faster than {@link Color#setStyle} if * you don't need the other CSS-style formats. * * For convenience, the list of names is exposed in `Color.NAMES` as a hash. * ```js * Color.NAMES.aliceblue // returns 0xF0F8FF * ``` * * @param {string} style - The color name. * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {Color} A reference to this color. */ setColorName( style, colorSpace = SRGBColorSpace ) { // color keywords const hex = _colorKeywords[ style.toLowerCase() ]; if ( hex !== undefined ) { // red this.setHex( hex, colorSpace ); } else { // unknown color console.warn( 'THREE.Color: Unknown color ' + style ); } return this; } /** * Returns a new color with copied values from this instance. * * @return {Color} A clone of this instance. */ clone() { return new this.constructor( this.r, this.g, this.b ); } /** * Copies the values of the given color to this instance. * * @param {Color} color - The color to copy. * @return {Color} A reference to this color. */ copy( color ) { this.r = color.r; this.g = color.g; this.b = color.b; return this; } /** * Copies the given color into this color, and then converts this color from * `SRGBColorSpace` to `LinearSRGBColorSpace`. * * @param {Color} color - The color to copy/convert. * @return {Color} A reference to this color. */ copySRGBToLinear( color ) { this.r = SRGBToLinear( color.r ); this.g = SRGBToLinear( color.g ); this.b = SRGBToLinear( color.b ); return this; } /** * Copies the given color into this color, and then converts this color from * `LinearSRGBColorSpace` to `SRGBColorSpace`. * * @param {Color} color - The color to copy/convert. 
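* * A small sketch converting between the two color spaces: * ```js * const linear = new THREE.Color().copySRGBToLinear( new THREE.Color( 0.5, 0.5, 0.5 ) ); * const srgb = new THREE.Color().copyLinearToSRGB( linear ); // back to the original values (up to rounding) * ```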
* @return {Color} A reference to this color. */ copyLinearToSRGB( color ) { this.r = LinearToSRGB( color.r ); this.g = LinearToSRGB( color.g ); this.b = LinearToSRGB( color.b ); return this; } /** * Converts this color from `SRGBColorSpace` to `LinearSRGBColorSpace`. * * @return {Color} A reference to this color. */ convertSRGBToLinear() { this.copySRGBToLinear( this ); return this; } /** * Converts this color from `LinearSRGBColorSpace` to `SRGBColorSpace`. * * @return {Color} A reference to this color. */ convertLinearToSRGB() { this.copyLinearToSRGB( this ); return this; } /** * Returns the hexadecimal value of this color. * * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {number} The hexadecimal value. */ getHex( colorSpace = SRGBColorSpace ) { ColorManagement.fromWorkingColorSpace( _color.copy( this ), colorSpace ); return Math.round( clamp( _color.r * 255, 0, 255 ) ) * 65536 + Math.round( clamp( _color.g * 255, 0, 255 ) ) * 256 + Math.round( clamp( _color.b * 255, 0, 255 ) ); } /** * Returns the hexadecimal value of this color as a string (for example, 'FFFFFF'). * * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {string} The hexadecimal value as a string. */ getHexString( colorSpace = SRGBColorSpace ) { return ( '000000' + this.getHex( colorSpace ).toString( 16 ) ).slice( -6 ); } /** * Converts the colors RGB values into the HSL format and stores them into the * given target object. * * @param {{h:number,s:number,l:number}} target - The target object that is used to store the method's result. * @param {string} [colorSpace=ColorManagement.workingColorSpace] - The color space. * @return {{h:number,s:number,l:number}} The HSL representation of this color. */ getHSL( target, colorSpace = ColorManagement.workingColorSpace ) { // h,s,l ranges are in 0.0 - 1.0 ColorManagement.fromWorkingColorSpace( _color.copy( this ), colorSpace ); const r = _color.r, g = _color.g, b = _color.b; const max = Math.max( r, g, b ); const min = Math.min( r, g, b ); let hue, saturation; const lightness = ( min + max ) / 2.0; if ( min === max ) { hue = 0; saturation = 0; } else { const delta = max - min; saturation = lightness <= 0.5 ? delta / ( max + min ) : delta / ( 2 - max - min ); switch ( max ) { case r: hue = ( g - b ) / delta + ( g < b ? 6 : 0 ); break; case g: hue = ( b - r ) / delta + 2; break; case b: hue = ( r - g ) / delta + 4; break; } hue /= 6; } target.h = hue; target.s = saturation; target.l = lightness; return target; } /** * Returns the RGB values of this color and stores them into the given target object. * * @param {Color} target - The target color that is used to store the method's result. * @param {string} [colorSpace=ColorManagement.workingColorSpace] - The color space. * @return {Color} The RGB representation of this color. */ getRGB( target, colorSpace = ColorManagement.workingColorSpace ) { ColorManagement.fromWorkingColorSpace( _color.copy( this ), colorSpace ); target.r = _color.r; target.g = _color.g; target.b = _color.b; return target; } /** * Returns the value of this color as a CSS style string. Example: `rgb(255,0,0)`. * * @param {string} [colorSpace=SRGBColorSpace] - The color space. * @return {string} The CSS representation of this color. */ getStyle( colorSpace = SRGBColorSpace ) { ColorManagement.fromWorkingColorSpace( _color.copy( this ), colorSpace ); const r = _color.r, g = _color.g, b = _color.b; if ( colorSpace !== SRGBColorSpace ) { // Requires CSS Color Module Level 4 (https://www.w3.org/TR/css-color-4/). 
return `color(${ colorSpace } ${ r.toFixed( 3 ) } ${ g.toFixed( 3 ) } ${ b.toFixed( 3 ) })`; } return `rgb(${ Math.round( r * 255 ) },${ Math.round( g * 255 ) },${ Math.round( b * 255 ) })`; } /** * Adds the given HSL values to this color's values. * Internally, this converts the color's RGB values to HSL, adds HSL * and then converts the color back to RGB. * * @param {number} h - Hue value between `0.0` and `1.0`. * @param {number} s - Saturation value between `0.0` and `1.0`. * @param {number} l - Lightness value between `0.0` and `1.0`. * @return {Color} A reference to this color. */ offsetHSL( h, s, l ) { this.getHSL( _hslA ); return this.setHSL( _hslA.h + h, _hslA.s + s, _hslA.l + l ); } /** * Adds the RGB values of the given color to the RGB values of this color. * * @param {Color} color - The color to add. * @return {Color} A reference to this color. */ add( color ) { this.r += color.r; this.g += color.g; this.b += color.b; return this; } /** * Adds the RGB values of the given colors and stores the result in this instance. * * @param {Color} color1 - The first color. * @param {Color} color2 - The second color. * @return {Color} A reference to this color. */ addColors( color1, color2 ) { this.r = color1.r + color2.r; this.g = color1.g + color2.g; this.b = color1.b + color2.b; return this; } /** * Adds the given scalar value to the RGB values of this color. * * @param {number} s - The scalar to add. * @return {Color} A reference to this color. */ addScalar( s ) { this.r += s; this.g += s; this.b += s; return this; } /** * Subtracts the RGB values of the given color from the RGB values of this color. * * @param {Color} color - The color to subtract. * @return {Color} A reference to this color. */ sub( color ) { this.r = Math.max( 0, this.r - color.r ); this.g = Math.max( 0, this.g - color.g ); this.b = Math.max( 0, this.b - color.b ); return this; } /** * Multiplies the RGB values of the given color with the RGB values of this color. * * @param {Color} color - The color to multiply. * @return {Color} A reference to this color. */ multiply( color ) { this.r *= color.r; this.g *= color.g; this.b *= color.b; return this; } /** * Multiplies the given scalar value with the RGB values of this color. * * @param {number} s - The scalar to multiply. * @return {Color} A reference to this color. */ multiplyScalar( s ) { this.r *= s; this.g *= s; this.b *= s; return this; } /** * Linearly interpolates this color's RGB values toward the RGB values of the * given color. The alpha argument can be thought of as the ratio between * the two colors, where `0.0` is this color and `1.0` is the first argument. * * @param {Color} color - The color to converge on. * @param {number} alpha - The interpolation factor in the closed interval `[0,1]`. * @return {Color} A reference to this color. */ lerp( color, alpha ) { this.r += ( color.r - this.r ) * alpha; this.g += ( color.g - this.g ) * alpha; this.b += ( color.b - this.b ) * alpha; return this; } /** * Linearly interpolates between the given colors and stores the result in this instance. * The alpha argument can be thought of as the ratio between the two colors, where `0.0` * is the first and `1.0` is the second color. * * @param {Color} color1 - The first color. * @param {Color} color2 - The second color. * @param {number} alpha - The interpolation factor in the closed interval `[0,1]`. * @return {Color} A reference to this color. 
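* * For example, a simple black-to-white fade driven by a `t` value in `[0,1]` (the `t` variable is assumed): * ```js * const from = new THREE.Color( 0x000000 ); * const to = new THREE.Color( 0xffffff ); * const faded = new THREE.Color().lerpColors( from, to, t ); * ```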
*/ lerpColors( color1, color2, alpha ) { this.r = color1.r + ( color2.r - color1.r ) * alpha; this.g = color1.g + ( color2.g - color1.g ) * alpha; this.b = color1.b + ( color2.b - color1.b ) * alpha; return this; } /** * Linearly interpolates this color's HSL values toward the HSL values of the * given color. It differs from {@link Color#lerp} by not interpolating straight * from one color to the other, but instead going through all the hues in between * those two colors. The alpha argument can be thought of as the ratio between * the two colors, where `0.0` is this color and `1.0` is the first argument. * * @param {Color} color - The color to converge on. * @param {number} alpha - The interpolation factor in the closed interval `[0,1]`. * @return {Color} A reference to this color. */ lerpHSL( color, alpha ) { this.getHSL( _hslA ); color.getHSL( _hslB ); const h = lerp( _hslA.h, _hslB.h, alpha ); const s = lerp( _hslA.s, _hslB.s, alpha ); const l = lerp( _hslA.l, _hslB.l, alpha ); this.setHSL( h, s, l ); return this; } /** * Sets the color's RGB components from the given 3D vector. * * @param {Vector3} v - The vector to set. * @return {Color} A reference to this color. */ setFromVector3( v ) { this.r = v.x; this.g = v.y; this.b = v.z; return this; } /** * Transforms this color with the given 3x3 matrix. * * @param {Matrix3} m - The matrix. * @return {Color} A reference to this color. */ applyMatrix3( m ) { const r = this.r, g = this.g, b = this.b; const e = m.elements; this.r = e[ 0 ] * r + e[ 3 ] * g + e[ 6 ] * b; this.g = e[ 1 ] * r + e[ 4 ] * g + e[ 7 ] * b; this.b = e[ 2 ] * r + e[ 5 ] * g + e[ 8 ] * b; return this; } /** * Returns `true` if this color is equal with the given one. * * @param {Color} c - The color to test for equality. * @return {boolean} Whether this color is equal with the given one. */ equals( c ) { return ( c.r === this.r ) && ( c.g === this.g ) && ( c.b === this.b ); } /** * Sets this color's RGB components from the given array. * * @param {Array} array - An array holding the RGB values. * @param {number} [offset=0] - The offset into the array. * @return {Color} A reference to this color. */ fromArray( array, offset = 0 ) { this.r = array[ offset ]; this.g = array[ offset + 1 ]; this.b = array[ offset + 2 ]; return this; } /** * Writes the RGB components of this color to the given array. If no array is provided, * the method returns a new array. * * @param {Array} [array=[]] - The target array holding the color components. * @param {number} [offset=0] - Index of the first element in the array. * @return {Array} The color components. */ toArray( array = [], offset = 0 ) { array[ offset ] = this.r; array[ offset + 1 ] = this.g; array[ offset + 2 ] = this.b; return array; } /** * Sets the components of this color from the given buffer attribute. * * @param {BufferAttribute} attribute - The buffer attribute holding color data. * @param {number} index - The index into the attribute. * @return {Color} A reference to this color. */ fromBufferAttribute( attribute, index ) { this.r = attribute.getX( index ); this.g = attribute.getY( index ); this.b = attribute.getZ( index ); return this; } /** * This method defines the serialization result of this class. Returns the color * as a hexadecimal value. * * @return {number} The hexadecimal value. */ toJSON() { return this.getHex(); } *[ Symbol.iterator ]() { yield this.r; yield this.g; yield this.b; } } const _color = /*@__PURE__*/ new Color(); /** * A dictionary with X11 color names.
* * Note that multiple words such as Dark Orange become the string 'darkorange'. * * @static * @type {Object} */ Color.NAMES = _colorKeywords; let _materialId = 0; /** * Abstract base class for materials. * * Materials define the appearance of renderable 3D objects. * * @abstract * @augments EventDispatcher */ class Material extends EventDispatcher { /** * Constructs a new material. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMaterial = true; /** * The ID of the material. * * @name Material#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _materialId ++ } ); /** * The UUID of the material. * * @type {string} * @readonly */ this.uuid = generateUUID(); /** * The name of the material. * * @type {string} */ this.name = ''; /** * The type property is used for detecting the object type * in context of serialization/deserialization. * * @type {string} * @readonly */ this.type = 'Material'; /** * Defines the blending type of the material. * * It must be set to `CustomBlending` if custom blending properties like * {@link Material#blendSrc}, {@link Material#blendDst} or {@link Material#blendEquation} * should have any effect. * * @type {(NoBlending|NormalBlending|AdditiveBlending|SubtractiveBlending|MultiplyBlending|CustomBlending)} * @default NormalBlending */ this.blending = NormalBlending; /** * Defines which side of faces will be rendered - front, back or both. * * @type {(FrontSide|BackSide|DoubleSide)} * @default FrontSide */ this.side = FrontSide; /** * If set to `true`, vertex colors should be used. * * The engine supports RGB and RGBA vertex colors depending on whether a three (RGB) or * four (RGBA) component color buffer attribute is used. * * @type {boolean} * @default false */ this.vertexColors = false; /** * Defines how transparent the material is. * A value of `0.0` indicates fully transparent, `1.0` is fully opaque. * * If the {@link Material#transparent} is not set to `true`, * the material will remain fully opaque and this value will only affect its color. * * @type {number} * @default 1 */ this.opacity = 1; /** * Defines whether this material is transparent. This has an effect on * rendering as transparent objects need special treatment and are rendered * after non-transparent objects. * * When set to true, the extent to which the material is transparent is * controlled by {@link Material#opacity}. * * @type {boolean} * @default false */ this.transparent = false; /** * Enables alpha hashed transparency, an alternative to {@link Material#transparent} or * {@link Material#alphaTest}. The material will not be rendered if opacity is lower than * a random threshold. Randomization introduces some grain or noise, but approximates alpha * blending without the associated problems of sorting. Using TAA can reduce the resulting noise. * * @type {boolean} * @default false */ this.alphaHash = false; /** * Defines the blending source factor. * * @type {(ZeroFactor|OneFactor|SrcColorFactor|OneMinusSrcColorFactor|SrcAlphaFactor|OneMinusSrcAlphaFactor|DstAlphaFactor|OneMinusDstAlphaFactor|DstColorFactor|OneMinusDstColorFactor|SrcAlphaSaturateFactor|ConstantColorFactor|OneMinusConstantColorFactor|ConstantAlphaFactor|OneMinusConstantAlphaFactor)} * @default SrcAlphaFactor */ this.blendSrc = SrcAlphaFactor; /** * Defines the blending destination factor. 
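* * The blend factors and equation only take effect when {@link Material#blending} is set to `CustomBlending`; a minimal additive-style setup might look like this (an existing `material` instance is assumed): * ```js * material.blending = THREE.CustomBlending; * material.blendEquation = THREE.AddEquation; * material.blendSrc = THREE.SrcAlphaFactor; * material.blendDst = THREE.OneFactor; * ```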
* * @type {(ZeroFactor|OneFactor|SrcColorFactor|OneMinusSrcColorFactor|SrcAlphaFactor|OneMinusSrcAlphaFactor|DstAlphaFactor|OneMinusDstAlphaFactor|DstColorFactor|OneMinusDstColorFactor|SrcAlphaSaturateFactor|ConstantColorFactor|OneMinusConstantColorFactor|ConstantAlphaFactor|OneMinusConstantAlphaFactor)} * @default OneMinusSrcAlphaFactor */ this.blendDst = OneMinusSrcAlphaFactor; /** * Defines the blending equation. * * @type {(AddEquation|SubtractEquation|ReverseSubtractEquation|MinEquation|MaxEquation)} * @default AddEquation */ this.blendEquation = AddEquation; /** * Defines the blending source alpha factor. * * @type {?(ZeroFactor|OneFactor|SrcColorFactor|OneMinusSrcColorFactor|SrcAlphaFactor|OneMinusSrcAlphaFactor|DstAlphaFactor|OneMinusDstAlphaFactor|DstColorFactor|OneMinusDstColorFactor|SrcAlphaSaturateFactor|ConstantColorFactor|OneMinusConstantColorFactor|ConstantAlphaFactor|OneMinusConstantAlphaFactor)} * @default null */ this.blendSrcAlpha = null; /** * Defines the blending destination alpha factor. * * @type {?(ZeroFactor|OneFactor|SrcColorFactor|OneMinusSrcColorFactor|SrcAlphaFactor|OneMinusSrcAlphaFactor|DstAlphaFactor|OneMinusDstAlphaFactor|DstColorFactor|OneMinusDstColorFactor|SrcAlphaSaturateFactor|ConstantColorFactor|OneMinusConstantColorFactor|ConstantAlphaFactor|OneMinusConstantAlphaFactor)} * @default null */ this.blendDstAlpha = null; /** * Defines the blending equation of the alpha channel. * * @type {?(AddEquation|SubtractEquation|ReverseSubtractEquation|MinEquation|MaxEquation)} * @default null */ this.blendEquationAlpha = null; /** * Represents the RGB values of the constant blend color. * * This property has only an effect when using custom blending with `ConstantColor` or `OneMinusConstantColor`. * * @type {Color} * @default (0,0,0) */ this.blendColor = new Color( 0, 0, 0 ); /** * Represents the alpha value of the constant blend color. * * This property has only an effect when using custom blending with `ConstantAlpha` or `OneMinusConstantAlpha`. * * @type {number} * @default 0 */ this.blendAlpha = 0; /** * Defines the depth function. * * @type {(NeverDepth|AlwaysDepth|LessDepth|LessEqualDepth|EqualDepth|GreaterEqualDepth|GreaterDepth|NotEqualDepth)} * @default LessEqualDepth */ this.depthFunc = LessEqualDepth; /** * Whether to have depth test enabled when rendering this material. * When the depth test is disabled, the depth write will also be implicitly disabled. * * @type {boolean} * @default true */ this.depthTest = true; /** * Whether rendering this material has any effect on the depth buffer. * * When drawing 2D overlays it can be useful to disable the depth writing in * order to layer several things together without creating z-index artifacts. * * @type {boolean} * @default true */ this.depthWrite = true; /** * The bit mask to use when writing to the stencil buffer. * * @type {number} * @default 0xff */ this.stencilWriteMask = 0xff; /** * The stencil comparison function to use. * * @type {NeverStencilFunc|LessStencilFunc|EqualStencilFunc|LessEqualStencilFunc|GreaterStencilFunc|NotEqualStencilFunc|GreaterEqualStencilFunc|AlwaysStencilFunc} * @default AlwaysStencilFunc */ this.stencilFunc = AlwaysStencilFunc; /** * The value to use when performing stencil comparisons or stencil operations. * * @type {number} * @default 0 */ this.stencilRef = 0; /** * The bit mask to use when comparing against the stencil buffer. 
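*
* A sketch of tagging every pixel covered by this material with a reference value
* (`material` stands for any material instance):
* ```js
* material.stencilWrite = true;
* material.stencilRef = 1;
* material.stencilFunc = THREE.AlwaysStencilFunc;
* material.stencilZPass = THREE.ReplaceStencilOp;
* ```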
* * @type {number} * @default 0xff */ this.stencilFuncMask = 0xff; /** * Which stencil operation to perform when the comparison function returns `false`. * * @type {ZeroStencilOp|KeepStencilOp|ReplaceStencilOp|IncrementStencilOp|DecrementStencilOp|IncrementWrapStencilOp|DecrementWrapStencilOp|InvertStencilOp} * @default KeepStencilOp */ this.stencilFail = KeepStencilOp; /** * Which stencil operation to perform when the comparison function returns * `true` but the depth test fails. * * @type {ZeroStencilOp|KeepStencilOp|ReplaceStencilOp|IncrementStencilOp|DecrementStencilOp|IncrementWrapStencilOp|DecrementWrapStencilOp|InvertStencilOp} * @default KeepStencilOp */ this.stencilZFail = KeepStencilOp; /** * Which stencil operation to perform when the comparison function returns * `true` and the depth test passes. * * @type {ZeroStencilOp|KeepStencilOp|ReplaceStencilOp|IncrementStencilOp|DecrementStencilOp|IncrementWrapStencilOp|DecrementWrapStencilOp|InvertStencilOp} * @default KeepStencilOp */ this.stencilZPass = KeepStencilOp; /** * Whether stencil operations are performed against the stencil buffer. In * order to perform writes or comparisons against the stencil buffer this * value must be `true`. * * @type {boolean} * @default false */ this.stencilWrite = false;
/** * User-defined clipping planes specified as THREE.Plane objects in world * space. These planes apply to the objects this material is attached to. * Points in space whose signed distance to the plane is negative are clipped * (not rendered). This requires {@link WebGLRenderer#localClippingEnabled} to * be `true`. * * @type {?Array} * @default null */ this.clippingPlanes = null; /** * Changes the behavior of clipping planes so that only their intersection is * clipped, rather than their union. * * @type {boolean} * @default false */ this.clipIntersection = false; /** * Defines whether to clip shadows according to the clipping planes specified * on this material. * * @type {boolean} * @default false */ this.clipShadows = false;
/** * Defines which side of faces casts shadows. If `null`, the side casting shadows * is determined as follows: * * - When {@link Material#side} is set to `FrontSide`, the back side casts shadows. * - When {@link Material#side} is set to `BackSide`, the front side casts shadows. * - When {@link Material#side} is set to `DoubleSide`, both sides cast shadows. * * @type {?(FrontSide|BackSide|DoubleSide)} * @default null */ this.shadowSide = null;
/** * Whether to render the material's color. * * This can be used in conjunction with {@link Object3D#renderOrder} to create invisible * objects that occlude other objects. * * @type {boolean} * @default true */ this.colorWrite = true; /** * Override the renderer's default precision for this material. * * @type {?('highp'|'mediump'|'lowp')} * @default null */ this.precision = null;
/** * Whether to use polygon offset or not. When enabled, each fragment's depth value will * be offset after it is interpolated from the depth values of the appropriate vertices. * The offset is added before the depth test is performed and before the value is written * into the depth buffer. * * Can be useful for rendering hidden-line images, for applying decals to surfaces, and for * rendering solids with highlighted edges. * * @type {boolean} * @default false */ this.polygonOffset = false; /** * Specifies a scale factor that is used to create a variable depth offset for each polygon.
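*
* A sketch of offsetting a decal-like material to avoid z-fighting (the factor and
* units values are assumptions to be tuned per scene):
* ```js
* material.polygonOffset = true;
* material.polygonOffsetFactor = - 1;
* material.polygonOffsetUnits = - 1;
* ```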
* * @type {number} * @default 0 */ this.polygonOffsetFactor = 0; /** * Is multiplied by an implementation-specific value to create a constant depth offset. * * @type {number} * @default 0 */ this.polygonOffsetUnits = 0; /** * Whether to apply dithering to the color to remove the appearance of banding. * * @type {boolean} * @default false */ this.dithering = false; /** * Whether alpha to coverage should be enabled or not. Can only be used with MSAA-enabled contexts * (meaning when the renderer was created with *antialias* parameter set to `true`). Enabling this * will smooth aliasing on clip plane edges and alphaTest-clipped edges. * * @type {boolean} * @default false */ this.alphaToCoverage = false; /** * Whether to premultiply the alpha (transparency) value. * * @type {boolean} * @default false */ this.premultipliedAlpha = false; /** * Whether double-sided, transparent objects should be rendered with a single pass or not. * * The engine renders double-sided, transparent objects with two draw calls (back faces first, * then front faces) to mitigate transparency artifacts. There are scenarios however where this * approach produces no quality gains but still doubles draw calls e.g. when rendering flat * vegetation like grass sprites. In these cases, set the `forceSinglePass` flag to `true` to * disable the two pass rendering to avoid performance issues. * * @type {boolean} * @default false */ this.forceSinglePass = false; /** * Whether it's possible to override the material with {@link Scene#overrideMaterial} or not. * * @type {boolean} * @default true */ this.allowOverride = true; /** * Defines whether 3D objects using this material are visible. * * @type {boolean} * @default true */ this.visible = true; /** * Defines whether this material is tone mapped according to the renderer's tone mapping setting. * * It is ignored when rendering to a render target or using post processing or when using * `WebGPURenderer`. In all these cases, all materials are honored by tone mapping. * * @type {boolean} * @default true */ this.toneMapped = true; /** * An object that can be used to store custom data about the Material. It * should not hold references to functions as these will not be cloned. * * @type {Object} */ this.userData = {}; /** * This starts at `0` and counts how many times {@link Material#needsUpdate} is set to `true`. * * @type {number} * @readonly * @default 0 */ this.version = 0; this._alphaTest = 0; } /** * Sets the alpha value to be used when running an alpha test. The material * will not be rendered if the opacity is lower than this value. * * @type {number} * @readonly * @default 0 */ get alphaTest() { return this._alphaTest; } set alphaTest( value ) { if ( this._alphaTest > 0 !== value > 0 ) { this.version ++; } this._alphaTest = value; } /** * An optional callback that is executed immediately before the material is used to render a 3D object. * * This method can only be used when rendering with {@link WebGLRenderer}. * * @param {WebGLRenderer} renderer - The renderer. * @param {Scene} scene - The scene. * @param {Camera} camera - The camera that is used to render the scene. * @param {BufferGeometry} geometry - The 3D object's geometry. * @param {Object3D} object - The 3D object. * @param {Object} group - The geometry group data. */ onBeforeRender( /* renderer, scene, camera, geometry, object, group */ ) {} /** * An optional callback that is executed immediately before the shader * program is compiled. This function is called with the shader source code * as a parameter. 
Useful for the modification of built-in materials. * * This method can only be used when rendering with {@link WebGLRenderer}. The * recommended approach when customizing materials is to use `WebGPURenderer` with the new * Node Material system and [TSL]{@link https://github.com/mrdoob/three.js/wiki/Three.js-Shading-Language}. * * @param {{vertexShader:string,fragmentShader:string,uniforms:Object}} shaderobject - The object holds the uniforms and the vertex and fragment shader source. * @param {WebGLRenderer} renderer - A reference to the renderer. */ onBeforeCompile( /* shaderobject, renderer */ ) {} /** * In case {@link Material#onBeforeCompile} is used, this callback can be used to identify * values of settings used in `onBeforeCompile()`, so three.js can reuse a cached * shader or recompile the shader for this material as needed. * * This method can only be used when rendering with {@link WebGLRenderer}. * * @return {string} The custom program cache key. */ customProgramCacheKey() { return this.onBeforeCompile.toString(); } /** * This method can be used to set default values from parameter objects. * It is a generic implementation so it can be used with different types * of materials. * * @param {Object} [values] - The material values to set. */ setValues( values ) { if ( values === undefined ) return; for ( const key in values ) { const newValue = values[ key ]; if ( newValue === undefined ) { console.warn( `THREE.Material: parameter '${ key }' has value of undefined.` ); continue; } const currentValue = this[ key ]; if ( currentValue === undefined ) { console.warn( `THREE.Material: '${ key }' is not a property of THREE.${ this.type }.` ); continue; } if ( currentValue && currentValue.isColor ) { currentValue.set( newValue ); } else if ( ( currentValue && currentValue.isVector3 ) && ( newValue && newValue.isVector3 ) ) { currentValue.copy( newValue ); } else { this[ key ] = newValue; } } } /** * Serializes the material into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized material. 
* @see {@link ObjectLoader#parse} */ toJSON( meta ) { const isRootObject = ( meta === undefined || typeof meta === 'string' ); if ( isRootObject ) { meta = { textures: {}, images: {} }; } const data = { metadata: { version: 4.6, type: 'Material', generator: 'Material.toJSON' } }; // standard Material serialization data.uuid = this.uuid; data.type = this.type; if ( this.name !== '' ) data.name = this.name; if ( this.color && this.color.isColor ) data.color = this.color.getHex(); if ( this.roughness !== undefined ) data.roughness = this.roughness; if ( this.metalness !== undefined ) data.metalness = this.metalness; if ( this.sheen !== undefined ) data.sheen = this.sheen; if ( this.sheenColor && this.sheenColor.isColor ) data.sheenColor = this.sheenColor.getHex(); if ( this.sheenRoughness !== undefined ) data.sheenRoughness = this.sheenRoughness; if ( this.emissive && this.emissive.isColor ) data.emissive = this.emissive.getHex(); if ( this.emissiveIntensity !== undefined && this.emissiveIntensity !== 1 ) data.emissiveIntensity = this.emissiveIntensity; if ( this.specular && this.specular.isColor ) data.specular = this.specular.getHex(); if ( this.specularIntensity !== undefined ) data.specularIntensity = this.specularIntensity; if ( this.specularColor && this.specularColor.isColor ) data.specularColor = this.specularColor.getHex(); if ( this.shininess !== undefined ) data.shininess = this.shininess; if ( this.clearcoat !== undefined ) data.clearcoat = this.clearcoat; if ( this.clearcoatRoughness !== undefined ) data.clearcoatRoughness = this.clearcoatRoughness; if ( this.clearcoatMap && this.clearcoatMap.isTexture ) { data.clearcoatMap = this.clearcoatMap.toJSON( meta ).uuid; } if ( this.clearcoatRoughnessMap && this.clearcoatRoughnessMap.isTexture ) { data.clearcoatRoughnessMap = this.clearcoatRoughnessMap.toJSON( meta ).uuid; } if ( this.clearcoatNormalMap && this.clearcoatNormalMap.isTexture ) { data.clearcoatNormalMap = this.clearcoatNormalMap.toJSON( meta ).uuid; data.clearcoatNormalScale = this.clearcoatNormalScale.toArray(); } if ( this.dispersion !== undefined ) data.dispersion = this.dispersion; if ( this.iridescence !== undefined ) data.iridescence = this.iridescence; if ( this.iridescenceIOR !== undefined ) data.iridescenceIOR = this.iridescenceIOR; if ( this.iridescenceThicknessRange !== undefined ) data.iridescenceThicknessRange = this.iridescenceThicknessRange; if ( this.iridescenceMap && this.iridescenceMap.isTexture ) { data.iridescenceMap = this.iridescenceMap.toJSON( meta ).uuid; } if ( this.iridescenceThicknessMap && this.iridescenceThicknessMap.isTexture ) { data.iridescenceThicknessMap = this.iridescenceThicknessMap.toJSON( meta ).uuid; } if ( this.anisotropy !== undefined ) data.anisotropy = this.anisotropy; if ( this.anisotropyRotation !== undefined ) data.anisotropyRotation = this.anisotropyRotation; if ( this.anisotropyMap && this.anisotropyMap.isTexture ) { data.anisotropyMap = this.anisotropyMap.toJSON( meta ).uuid; } if ( this.map && this.map.isTexture ) data.map = this.map.toJSON( meta ).uuid; if ( this.matcap && this.matcap.isTexture ) data.matcap = this.matcap.toJSON( meta ).uuid; if ( this.alphaMap && this.alphaMap.isTexture ) data.alphaMap = this.alphaMap.toJSON( meta ).uuid; if ( this.lightMap && this.lightMap.isTexture ) { data.lightMap = this.lightMap.toJSON( meta ).uuid; data.lightMapIntensity = this.lightMapIntensity; } if ( this.aoMap && this.aoMap.isTexture ) { data.aoMap = this.aoMap.toJSON( meta ).uuid; data.aoMapIntensity = this.aoMapIntensity; } if 
( this.bumpMap && this.bumpMap.isTexture ) { data.bumpMap = this.bumpMap.toJSON( meta ).uuid; data.bumpScale = this.bumpScale; } if ( this.normalMap && this.normalMap.isTexture ) { data.normalMap = this.normalMap.toJSON( meta ).uuid; data.normalMapType = this.normalMapType; data.normalScale = this.normalScale.toArray(); } if ( this.displacementMap && this.displacementMap.isTexture ) { data.displacementMap = this.displacementMap.toJSON( meta ).uuid; data.displacementScale = this.displacementScale; data.displacementBias = this.displacementBias; } if ( this.roughnessMap && this.roughnessMap.isTexture ) data.roughnessMap = this.roughnessMap.toJSON( meta ).uuid; if ( this.metalnessMap && this.metalnessMap.isTexture ) data.metalnessMap = this.metalnessMap.toJSON( meta ).uuid; if ( this.emissiveMap && this.emissiveMap.isTexture ) data.emissiveMap = this.emissiveMap.toJSON( meta ).uuid; if ( this.specularMap && this.specularMap.isTexture ) data.specularMap = this.specularMap.toJSON( meta ).uuid; if ( this.specularIntensityMap && this.specularIntensityMap.isTexture ) data.specularIntensityMap = this.specularIntensityMap.toJSON( meta ).uuid; if ( this.specularColorMap && this.specularColorMap.isTexture ) data.specularColorMap = this.specularColorMap.toJSON( meta ).uuid; if ( this.envMap && this.envMap.isTexture ) { data.envMap = this.envMap.toJSON( meta ).uuid; if ( this.combine !== undefined ) data.combine = this.combine; } if ( this.envMapRotation !== undefined ) data.envMapRotation = this.envMapRotation.toArray(); if ( this.envMapIntensity !== undefined ) data.envMapIntensity = this.envMapIntensity; if ( this.reflectivity !== undefined ) data.reflectivity = this.reflectivity; if ( this.refractionRatio !== undefined ) data.refractionRatio = this.refractionRatio; if ( this.gradientMap && this.gradientMap.isTexture ) { data.gradientMap = this.gradientMap.toJSON( meta ).uuid; } if ( this.transmission !== undefined ) data.transmission = this.transmission; if ( this.transmissionMap && this.transmissionMap.isTexture ) data.transmissionMap = this.transmissionMap.toJSON( meta ).uuid; if ( this.thickness !== undefined ) data.thickness = this.thickness; if ( this.thicknessMap && this.thicknessMap.isTexture ) data.thicknessMap = this.thicknessMap.toJSON( meta ).uuid; if ( this.attenuationDistance !== undefined && this.attenuationDistance !== Infinity ) data.attenuationDistance = this.attenuationDistance; if ( this.attenuationColor !== undefined ) data.attenuationColor = this.attenuationColor.getHex(); if ( this.size !== undefined ) data.size = this.size; if ( this.shadowSide !== null ) data.shadowSide = this.shadowSide; if ( this.sizeAttenuation !== undefined ) data.sizeAttenuation = this.sizeAttenuation; if ( this.blending !== NormalBlending ) data.blending = this.blending; if ( this.side !== FrontSide ) data.side = this.side; if ( this.vertexColors === true ) data.vertexColors = true; if ( this.opacity < 1 ) data.opacity = this.opacity; if ( this.transparent === true ) data.transparent = true; if ( this.blendSrc !== SrcAlphaFactor ) data.blendSrc = this.blendSrc; if ( this.blendDst !== OneMinusSrcAlphaFactor ) data.blendDst = this.blendDst; if ( this.blendEquation !== AddEquation ) data.blendEquation = this.blendEquation; if ( this.blendSrcAlpha !== null ) data.blendSrcAlpha = this.blendSrcAlpha; if ( this.blendDstAlpha !== null ) data.blendDstAlpha = this.blendDstAlpha; if ( this.blendEquationAlpha !== null ) data.blendEquationAlpha = this.blendEquationAlpha; if ( this.blendColor && 
this.blendColor.isColor ) data.blendColor = this.blendColor.getHex(); if ( this.blendAlpha !== 0 ) data.blendAlpha = this.blendAlpha; if ( this.depthFunc !== LessEqualDepth ) data.depthFunc = this.depthFunc; if ( this.depthTest === false ) data.depthTest = this.depthTest; if ( this.depthWrite === false ) data.depthWrite = this.depthWrite; if ( this.colorWrite === false ) data.colorWrite = this.colorWrite; if ( this.stencilWriteMask !== 0xff ) data.stencilWriteMask = this.stencilWriteMask; if ( this.stencilFunc !== AlwaysStencilFunc ) data.stencilFunc = this.stencilFunc; if ( this.stencilRef !== 0 ) data.stencilRef = this.stencilRef; if ( this.stencilFuncMask !== 0xff ) data.stencilFuncMask = this.stencilFuncMask; if ( this.stencilFail !== KeepStencilOp ) data.stencilFail = this.stencilFail; if ( this.stencilZFail !== KeepStencilOp ) data.stencilZFail = this.stencilZFail; if ( this.stencilZPass !== KeepStencilOp ) data.stencilZPass = this.stencilZPass; if ( this.stencilWrite === true ) data.stencilWrite = this.stencilWrite; // rotation (SpriteMaterial) if ( this.rotation !== undefined && this.rotation !== 0 ) data.rotation = this.rotation; if ( this.polygonOffset === true ) data.polygonOffset = true; if ( this.polygonOffsetFactor !== 0 ) data.polygonOffsetFactor = this.polygonOffsetFactor; if ( this.polygonOffsetUnits !== 0 ) data.polygonOffsetUnits = this.polygonOffsetUnits; if ( this.linewidth !== undefined && this.linewidth !== 1 ) data.linewidth = this.linewidth; if ( this.dashSize !== undefined ) data.dashSize = this.dashSize; if ( this.gapSize !== undefined ) data.gapSize = this.gapSize; if ( this.scale !== undefined ) data.scale = this.scale; if ( this.dithering === true ) data.dithering = true; if ( this.alphaTest > 0 ) data.alphaTest = this.alphaTest; if ( this.alphaHash === true ) data.alphaHash = true; if ( this.alphaToCoverage === true ) data.alphaToCoverage = true; if ( this.premultipliedAlpha === true ) data.premultipliedAlpha = true; if ( this.forceSinglePass === true ) data.forceSinglePass = true; if ( this.wireframe === true ) data.wireframe = true; if ( this.wireframeLinewidth > 1 ) data.wireframeLinewidth = this.wireframeLinewidth; if ( this.wireframeLinecap !== 'round' ) data.wireframeLinecap = this.wireframeLinecap; if ( this.wireframeLinejoin !== 'round' ) data.wireframeLinejoin = this.wireframeLinejoin; if ( this.flatShading === true ) data.flatShading = true; if ( this.visible === false ) data.visible = false; if ( this.toneMapped === false ) data.toneMapped = false; if ( this.fog === false ) data.fog = false; if ( Object.keys( this.userData ).length > 0 ) data.userData = this.userData; // TODO: Copied from Object3D.toJSON function extractFromCache( cache ) { const values = []; for ( const key in cache ) { const data = cache[ key ]; delete data.metadata; values.push( data ); } return values; } if ( isRootObject ) { const textures = extractFromCache( meta.textures ); const images = extractFromCache( meta.images ); if ( textures.length > 0 ) data.textures = textures; if ( images.length > 0 ) data.images = images; } return data; } /** * Returns a new material with copied values from this instance. * * @return {Material} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given material to this instance. * * @param {Material} source - The material to copy. * @return {Material} A reference to this instance. 
*/ copy( source ) { this.name = source.name; this.blending = source.blending; this.side = source.side; this.vertexColors = source.vertexColors; this.opacity = source.opacity; this.transparent = source.transparent; this.blendSrc = source.blendSrc; this.blendDst = source.blendDst; this.blendEquation = source.blendEquation; this.blendSrcAlpha = source.blendSrcAlpha; this.blendDstAlpha = source.blendDstAlpha; this.blendEquationAlpha = source.blendEquationAlpha; this.blendColor.copy( source.blendColor ); this.blendAlpha = source.blendAlpha; this.depthFunc = source.depthFunc; this.depthTest = source.depthTest; this.depthWrite = source.depthWrite; this.stencilWriteMask = source.stencilWriteMask; this.stencilFunc = source.stencilFunc; this.stencilRef = source.stencilRef; this.stencilFuncMask = source.stencilFuncMask; this.stencilFail = source.stencilFail; this.stencilZFail = source.stencilZFail; this.stencilZPass = source.stencilZPass; this.stencilWrite = source.stencilWrite; const srcPlanes = source.clippingPlanes; let dstPlanes = null; if ( srcPlanes !== null ) { const n = srcPlanes.length; dstPlanes = new Array( n ); for ( let i = 0; i !== n; ++ i ) { dstPlanes[ i ] = srcPlanes[ i ].clone(); } } this.clippingPlanes = dstPlanes; this.clipIntersection = source.clipIntersection; this.clipShadows = source.clipShadows; this.shadowSide = source.shadowSide; this.colorWrite = source.colorWrite; this.precision = source.precision; this.polygonOffset = source.polygonOffset; this.polygonOffsetFactor = source.polygonOffsetFactor; this.polygonOffsetUnits = source.polygonOffsetUnits; this.dithering = source.dithering; this.alphaTest = source.alphaTest; this.alphaHash = source.alphaHash; this.alphaToCoverage = source.alphaToCoverage; this.premultipliedAlpha = source.premultipliedAlpha; this.forceSinglePass = source.forceSinglePass; this.visible = source.visible; this.toneMapped = source.toneMapped; this.userData = JSON.parse( JSON.stringify( source.userData ) ); return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. * * @fires Material#dispose */ dispose() { /** * Fires when the material has been disposed of. * * @event Material#dispose * @type {Object} */ this.dispatchEvent( { type: 'dispose' } ); } /** * Setting this property to `true` indicates the engine the material * needs to be recompiled. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) this.version ++; } onBuild( /* shaderobject, renderer */ ) { console.warn( 'Material: onBuild() has been removed.' ); // @deprecated, r166 } } /** * A material for drawing geometries in a simple shaded (flat or wireframe) way. * * This material is not affected by lights. * * @augments Material */ class MeshBasicMaterial extends Material { /** * Constructs a new mesh basic material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshBasicMaterial = true; this.type = 'MeshBasicMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); // emissive /** * The color map. 
May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The light map. Requires a second set of UVs. * * @type {?Texture} * @default null */ this.lightMap = null; /** * Intensity of the baked light. * * @type {number} * @default 1 */ this.lightMapIntensity = 1.0; /** * The red channel of this texture is used as the ambient occlusion map. * Requires a second set of UVs. * * @type {?Texture} * @default null */ this.aoMap = null; /** * Intensity of the ambient occlusion effect. Range is `[0,1]`, where `0` * disables ambient occlusion. Where intensity is `1` and the AO map's * red channel is also `1`, ambient light is fully occluded on a surface. * * @type {number} * @default 1 */ this.aoMapIntensity = 1.0; /** * Specular map used by the material. * * @type {?Texture} * @default null */ this.specularMap = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The environment map. * * @type {?Texture} * @default null */ this.envMap = null; /** * The rotation of the environment map in radians. * * @type {Euler} * @default (0,0,0) */ this.envMapRotation = new Euler(); /** * How to combine the result of the surface's color with the environment map, if any. * * When set to `MixOperation`, the {@link MeshBasicMaterial#reflectivity} is used to * blend between the two colors. * * @type {(MultiplyOperation|MixOperation|AddOperation)} * @default MultiplyOperation */ this.combine = MultiplyOperation; /** * How much the environment map affects the surface. * The valid range is between `0` (no reflections) and `1` (full reflections). * * @type {number} * @default 1 */ this.reflectivity = 1; /** * The index of refraction (IOR) of air (approximately 1) divided by the * index of refraction of the material. It is used with environment mapping * modes {@link CubeRefractionMapping} and {@link EquirectangularRefractionMapping}. * The refraction ratio should not exceed `1`. * * @type {number} * @default 0.98 */ this.refractionRatio = 0.98; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * Can only be used with {@link SVGRenderer}. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines appearance of wireframe ends. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinecap = 'round'; /** * Defines appearance of wireframe joints. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinejoin = 'round'; /** * Whether the material is affected by fog or not. 
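*
* A construction sketch (any documented property can be passed as a parameter; the
* values here are only an illustration):
* ```js
* const material = new THREE.MeshBasicMaterial( { color: 0x88ccff, wireframe: true, fog: false } );
* ```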
* * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.lightMap = source.lightMap; this.lightMapIntensity = source.lightMapIntensity; this.aoMap = source.aoMap; this.aoMapIntensity = source.aoMapIntensity; this.specularMap = source.specularMap; this.alphaMap = source.alphaMap; this.envMap = source.envMap; this.envMapRotation.copy( source.envMapRotation ); this.combine = source.combine; this.reflectivity = source.reflectivity; this.refractionRatio = source.refractionRatio; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.wireframeLinecap = source.wireframeLinecap; this.wireframeLinejoin = source.wireframeLinejoin; this.fog = source.fog; return this; } } // Fast Half Float Conversions, http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf const _tables = /*@__PURE__*/ _generateTables(); function _generateTables() { // float32 to float16 helpers const buffer = new ArrayBuffer( 4 ); const floatView = new Float32Array( buffer ); const uint32View = new Uint32Array( buffer ); const baseTable = new Uint32Array( 512 ); const shiftTable = new Uint32Array( 512 ); for ( let i = 0; i < 256; ++ i ) { const e = i - 127; // very small number (0, -0) if ( e < -27 ) { baseTable[ i ] = 0x0000; baseTable[ i | 0x100 ] = 0x8000; shiftTable[ i ] = 24; shiftTable[ i | 0x100 ] = 24; // small number (denorm) } else if ( e < -14 ) { baseTable[ i ] = 0x0400 >> ( - e - 14 ); baseTable[ i | 0x100 ] = ( 0x0400 >> ( - e - 14 ) ) | 0x8000; shiftTable[ i ] = - e - 1; shiftTable[ i | 0x100 ] = - e - 1; // normal number } else if ( e <= 15 ) { baseTable[ i ] = ( e + 15 ) << 10; baseTable[ i | 0x100 ] = ( ( e + 15 ) << 10 ) | 0x8000; shiftTable[ i ] = 13; shiftTable[ i | 0x100 ] = 13; // large number (Infinity, -Infinity) } else if ( e < 128 ) { baseTable[ i ] = 0x7c00; baseTable[ i | 0x100 ] = 0xfc00; shiftTable[ i ] = 24; shiftTable[ i | 0x100 ] = 24; // stay (NaN, Infinity, -Infinity) } else { baseTable[ i ] = 0x7c00; baseTable[ i | 0x100 ] = 0xfc00; shiftTable[ i ] = 13; shiftTable[ i | 0x100 ] = 13; } } // float16 to float32 helpers const mantissaTable = new Uint32Array( 2048 ); const exponentTable = new Uint32Array( 64 ); const offsetTable = new Uint32Array( 64 ); for ( let i = 1; i < 1024; ++ i ) { let m = i << 13; // zero pad mantissa bits let e = 0; // zero exponent // normalized while ( ( m & 0x00800000 ) === 0 ) { m <<= 1; e -= 0x00800000; // decrement exponent } m &= -8388609; // clear leading 1 bit e += 0x38800000; // adjust bias mantissaTable[ i ] = m | e; } for ( let i = 1024; i < 2048; ++ i ) { mantissaTable[ i ] = 0x38000000 + ( ( i - 1024 ) << 13 ); } for ( let i = 1; i < 31; ++ i ) { exponentTable[ i ] = i << 23; } exponentTable[ 31 ] = 0x47800000; exponentTable[ 32 ] = 0x80000000; for ( let i = 33; i < 63; ++ i ) { exponentTable[ i ] = 0x80000000 + ( ( i - 32 ) << 23 ); } exponentTable[ 63 ] = 0xc7800000; for ( let i = 1; i < 64; ++ i ) { if ( i !== 32 ) { offsetTable[ i ] = 1024; } } return { floatView: floatView, uint32View: uint32View, baseTable: baseTable, shiftTable: shiftTable, mantissaTable: mantissaTable, exponentTable: exponentTable, offsetTable: offsetTable }; } /** * Returns a half precision floating point value (FP16) from the given single * precision floating point value (FP32). * * @param {number} val - A single precision floating point value. * @return {number} The FP16 value. 
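*
* A round-trip sketch using the two module-level helpers defined here (`0.5` is exactly
* representable in FP16, so the value survives the conversion):
* ```js
* const half = toHalfFloat( 0.5 );
* const single = fromHalfFloat( half ); // 0.5
* ```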
*/ function toHalfFloat( val ) { if ( Math.abs( val ) > 65504 ) console.warn( 'THREE.DataUtils.toHalfFloat(): Value out of range.' ); val = clamp( val, -65504, 65504 ); _tables.floatView[ 0 ] = val; const f = _tables.uint32View[ 0 ]; const e = ( f >> 23 ) & 0x1ff; return _tables.baseTable[ e ] + ( ( f & 0x007fffff ) >> _tables.shiftTable[ e ] ); } /** * Returns a single precision floating point value (FP32) from the given half * precision floating point value (FP16). * * @param {number} val - A half precision floating point value. * @return {number} The FP32 value. */ function fromHalfFloat( val ) { const m = val >> 10; _tables.uint32View[ 0 ] = _tables.mantissaTable[ _tables.offsetTable[ m ] + ( val & 0x3ff ) ] + _tables.exponentTable[ m ]; return _tables.floatView[ 0 ]; } /** * A class containing utility functions for data. * * @hideconstructor */ class DataUtils { /** * Returns a half precision floating point value (FP16) from the given single * precision floating point value (FP32). * * @param {number} val - A single precision floating point value. * @return {number} The FP16 value. */ static toHalfFloat( val ) { return toHalfFloat( val ); } /** * Returns a single precision floating point value (FP32) from the given half * precision floating point value (FP16). * * @param {number} val - A half precision floating point value. * @return {number} The FP32 value. */ static fromHalfFloat( val ) { return fromHalfFloat( val ); } } const _vector$9 = /*@__PURE__*/ new Vector3(); const _vector2$1 = /*@__PURE__*/ new Vector2(); let _id$3 = 0; /** * This class stores data for an attribute (such as vertex positions, face * indices, normals, colors, UVs, and any custom attributes ) associated with * a geometry, which allows for more efficient passing of data to the GPU. * * When working with vector-like data, the `fromBufferAttribute( attribute, index )` * helper methods on vector and color class might be helpful. E.g. {@link Vector3#fromBufferAttribute}. */ class BufferAttribute { /** * Constructs a new buffer attribute. * * @param {TypedArray} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized = false ) { if ( Array.isArray( array ) ) { throw new TypeError( 'THREE.BufferAttribute: array should be a Typed Array.' ); } /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBufferAttribute = true; /** * The ID of the buffer attribute. * * @name BufferAttribute#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _id$3 ++ } ); /** * The name of the buffer attribute. * * @type {string} */ this.name = ''; /** * The array holding the attribute data. It should have `itemSize * numVertices` * elements, where `numVertices` is the number of vertices in the associated geometry. * * @type {TypedArray} */ this.array = array; /** * The number of values of the array that should be associated with a particular vertex. * For instance, if this attribute is storing a 3-component vector (such as a position, * normal, or color), then the value should be `3`. * * @type {number} */ this.itemSize = itemSize; /** * Represents the number of items this buffer attribute stores. It is internally computed * by dividing the `array` length by the `itemSize`. * * @type {number} * @readonly */ this.count = array !== undefined ? array.length / itemSize : 0; /** * Applies to integer data only. 
Indicates how the underlying data in the buffer maps to * the values in the GLSL code. For instance, if `array` is an instance of `Uint16Array`, * and `normalized` is `true`, the values `0 - +65535` in the array data will be mapped to * `0.0f - +1.0f` in the GLSL attribute. If `normalized` is `false`, the values will be converted * to floats unmodified, i.e. `65535` becomes `65535.0f`. * * @type {boolean} */ this.normalized = normalized;
/** * Defines the intended usage pattern of the data store for optimization purposes. * * Note: After the initial use of a buffer, its usage cannot be changed. Instead, * instantiate a new one and set the desired usage before the next render. * * @type {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} * @default StaticDrawUsage */ this.usage = StaticDrawUsage; /** * This can be used to only update some components of stored vectors (for example, just the * component related to color). Use the `addUpdateRange()` function to add ranges to this array. * * @type {Array} */ this.updateRanges = []; /** * Configures the bound GPU type for use in shaders. * * Note: this only has an effect for integer arrays and is not configurable for float arrays. * For lower precision float types, use `Float16BufferAttribute`. * * @type {(FloatType|IntType)} * @default FloatType */ this.gpuType = FloatType; /** * A version number, incremented every time the `needsUpdate` property is set to `true`. * * @type {number} */ this.version = 0; }
/** * A callback function that is executed after the renderer has transferred the attribute * array data to the GPU. */ onUploadCallback() {} /** * Flag to indicate that this attribute has changed and should be re-sent to * the GPU. Set this to `true` when you modify the value of the array. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) this.version ++; }
/** * Sets the usage of this buffer attribute. * * @param {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} value - The usage to set. * @return {BufferAttribute} A reference to this buffer attribute. */ setUsage( value ) { this.usage = value; return this; } /** * Adds a range of data in the data array to be updated on the GPU. * * @param {number} start - Position at which to start update. * @param {number} count - The number of components to update. */ addUpdateRange( start, count ) { this.updateRanges.push( { start, count } ); } /** * Clears the update ranges. */ clearUpdateRanges() { this.updateRanges.length = 0; }
/** * Copies the values of the given buffer attribute to this instance. * * @param {BufferAttribute} source - The buffer attribute to copy. * @return {BufferAttribute} A reference to this instance. */ copy( source ) { this.name = source.name; this.array = new source.array.constructor( source.array ); this.itemSize = source.itemSize; this.count = source.count; this.normalized = source.normalized; this.usage = source.usage; this.gpuType = source.gpuType; return this; } /** * Copies a vector from the given buffer attribute to this one. The source * and destination positions in the attribute buffers are represented by the * given indices. * * @param {number} index1 - The destination index into this buffer attribute. * @param {BufferAttribute} attribute - The buffer attribute to copy from.
* @param {number} index2 - The source index into the given buffer attribute. * @return {BufferAttribute} A reference to this instance. */ copyAt( index1, attribute, index2 ) { index1 *= this.itemSize; index2 *= attribute.itemSize; for ( let i = 0, l = this.itemSize; i < l; i ++ ) { this.array[ index1 + i ] = attribute.array[ index2 + i ]; } return this; } /** * Copies the given array data into this buffer attribute. * * @param {(TypedArray|Array)} array - The array to copy. * @return {BufferAttribute} A reference to this instance. */ copyArray( array ) { this.array.set( array ); return this; } /** * Applies the given 3x3 matrix to the given attribute. Works with * item size `2` and `3`. * * @param {Matrix3} m - The matrix to apply. * @return {BufferAttribute} A reference to this instance. */ applyMatrix3( m ) { if ( this.itemSize === 2 ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector2$1.fromBufferAttribute( this, i ); _vector2$1.applyMatrix3( m ); this.setXY( i, _vector2$1.x, _vector2$1.y ); } } else if ( this.itemSize === 3 ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$9.fromBufferAttribute( this, i ); _vector$9.applyMatrix3( m ); this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z ); } } return this; } /** * Applies the given 4x4 matrix to the given attribute. Only works with * item size `3`. * * @param {Matrix4} m - The matrix to apply. * @return {BufferAttribute} A reference to this instance. */ applyMatrix4( m ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$9.fromBufferAttribute( this, i ); _vector$9.applyMatrix4( m ); this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z ); } return this; } /** * Applies the given 3x3 normal matrix to the given attribute. Only works with * item size `3`. * * @param {Matrix3} m - The normal matrix to apply. * @return {BufferAttribute} A reference to this instance. */ applyNormalMatrix( m ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$9.fromBufferAttribute( this, i ); _vector$9.applyNormalMatrix( m ); this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z ); } return this; } /** * Applies the given 4x4 matrix to the given attribute. Only works with * item size `3` and with direction vectors. * * @param {Matrix4} m - The matrix to apply. * @return {BufferAttribute} A reference to this instance. */ transformDirection( m ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$9.fromBufferAttribute( this, i ); _vector$9.transformDirection( m ); this.setXYZ( i, _vector$9.x, _vector$9.y, _vector$9.z ); } return this; } /** * Sets the given array data in the buffer attribute. * * @param {(TypedArray|Array)} value - The array data to set. * @param {number} [offset=0] - The offset in this buffer attribute's array. * @return {BufferAttribute} A reference to this instance. */ set( value, offset = 0 ) { // Matching BufferAttribute constructor, do not normalize the array. this.array.set( value, offset ); return this; } /** * Returns the given component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} component - The component index. * @return {number} The returned value. */ getComponent( index, component ) { let value = this.array[ index * this.itemSize + component ]; if ( this.normalized ) value = denormalize( value, this.array ); return value; } /** * Sets the given value to the given component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. 
* @param {number} component - The component index. * @param {number} value - The value to set. * @return {BufferAttribute} A reference to this instance. */ setComponent( index, component, value ) { if ( this.normalized ) value = normalize( value, this.array ); this.array[ index * this.itemSize + component ] = value; return this; } /** * Returns the x component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The x component. */ getX( index ) { let x = this.array[ index * this.itemSize ]; if ( this.normalized ) x = denormalize( x, this.array ); return x; } /** * Sets the x component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value to set. * @return {BufferAttribute} A reference to this instance. */ setX( index, x ) { if ( this.normalized ) x = normalize( x, this.array ); this.array[ index * this.itemSize ] = x; return this; } /** * Returns the y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The y component. */ getY( index ) { let y = this.array[ index * this.itemSize + 1 ]; if ( this.normalized ) y = denormalize( y, this.array ); return y; } /** * Sets the y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} y - The value to set. * @return {BufferAttribute} A reference to this instance. */ setY( index, y ) { if ( this.normalized ) y = normalize( y, this.array ); this.array[ index * this.itemSize + 1 ] = y; return this; } /** * Returns the z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The z component. */ getZ( index ) { let z = this.array[ index * this.itemSize + 2 ]; if ( this.normalized ) z = denormalize( z, this.array ); return z; } /** * Sets the z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} z - The value to set. * @return {BufferAttribute} A reference to this instance. */ setZ( index, z ) { if ( this.normalized ) z = normalize( z, this.array ); this.array[ index * this.itemSize + 2 ] = z; return this; } /** * Returns the w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The w component. */ getW( index ) { let w = this.array[ index * this.itemSize + 3 ]; if ( this.normalized ) w = denormalize( w, this.array ); return w; } /** * Sets the w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} w - The value to set. * @return {BufferAttribute} A reference to this instance. */ setW( index, w ) { if ( this.normalized ) w = normalize( w, this.array ); this.array[ index * this.itemSize + 3 ] = w; return this; } /** * Sets the x and y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @return {BufferAttribute} A reference to this instance. 
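*
* A usage sketch (`uvAttribute` is a stand-in for any attribute with an `itemSize` of `2`;
* the flag afterwards requests a re-upload to the GPU):
* ```js
* uvAttribute.setXY( 0, 0.5, 0.5 );
* uvAttribute.needsUpdate = true;
* ```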
*/ setXY( index, x, y ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); } this.array[ index + 0 ] = x; this.array[ index + 1 ] = y; return this; } /** * Sets the x, y and z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @param {number} z - The value for the z component to set. * @return {BufferAttribute} A reference to this instance. */ setXYZ( index, x, y, z ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); } this.array[ index + 0 ] = x; this.array[ index + 1 ] = y; this.array[ index + 2 ] = z; return this; } /** * Sets the x, y, z and w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @param {number} z - The value for the z component to set. * @param {number} w - The value for the w component to set. * @return {BufferAttribute} A reference to this instance. */ setXYZW( index, x, y, z, w ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); w = normalize( w, this.array ); } this.array[ index + 0 ] = x; this.array[ index + 1 ] = y; this.array[ index + 2 ] = z; this.array[ index + 3 ] = w; return this; } /** * Sets the given callback function that is executed after the Renderer has transferred * the attribute array data to the GPU. Can be used to perform clean-up operations after * the upload when attribute data are not needed anymore on the CPU side. * * @param {Function} callback - The `onUpload()` callback. * @return {BufferAttribute} A reference to this instance. */ onUpload( callback ) { this.onUploadCallback = callback; return this; } /** * Returns a new buffer attribute with copied values from this instance. * * @return {BufferAttribute} A clone of this instance. */ clone() { return new this.constructor( this.array, this.itemSize ).copy( this ); } /** * Serializes the buffer attribute into JSON. * * @return {Object} A JSON object representing the serialized buffer attribute. */ toJSON() { const data = { itemSize: this.itemSize, type: this.array.constructor.name, array: Array.from( this.array ), normalized: this.normalized }; if ( this.name !== '' ) data.name = this.name; if ( this.usage !== StaticDrawUsage ) data.usage = this.usage; return data; } } /** * Convenient class that can be used when creating a `Int8` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Int8BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Int8Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Int8Array( array ), itemSize, normalized ); } } /** * Convenient class that can be used when creating a `UInt8` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Uint8BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. 
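*
* A construction sketch (a normalized RGB color attribute for two vertices; the values
* are only an illustration):
* ```js
* const colors = new THREE.Uint8BufferAttribute( [ 255, 0, 0, 0, 255, 0 ], 3, true );
* ```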
* * @param {(Array|Uint8Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Uint8Array( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `UInt8Clamped` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Uint8ClampedBufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Uint8ClampedArray)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Uint8ClampedArray( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `Int16` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Int16BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Int16Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Int16Array( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `UInt16` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Uint16BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Uint16Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Uint16Array( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `Int32` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Int32BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Int32Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Int32Array( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `UInt32` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Uint32BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Uint32Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Uint32Array( array ), itemSize, normalized ); } }
/** * Convenient class that can be used when creating a `Float16` buffer attribute with * a plain `Array` instance. * * This class automatically converts to and from FP16 since `Float16Array` is not * natively supported in JavaScript. * * @augments BufferAttribute */ class Float16BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Uint16Array)} array - The array holding the attribute data.
* @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Uint16Array( array ), itemSize, normalized ); this.isFloat16BufferAttribute = true; } getX( index ) { let x = fromHalfFloat( this.array[ index * this.itemSize ] ); if ( this.normalized ) x = denormalize( x, this.array ); return x; } setX( index, x ) { if ( this.normalized ) x = normalize( x, this.array ); this.array[ index * this.itemSize ] = toHalfFloat( x ); return this; } getY( index ) { let y = fromHalfFloat( this.array[ index * this.itemSize + 1 ] ); if ( this.normalized ) y = denormalize( y, this.array ); return y; } setY( index, y ) { if ( this.normalized ) y = normalize( y, this.array ); this.array[ index * this.itemSize + 1 ] = toHalfFloat( y ); return this; } getZ( index ) { let z = fromHalfFloat( this.array[ index * this.itemSize + 2 ] ); if ( this.normalized ) z = denormalize( z, this.array ); return z; } setZ( index, z ) { if ( this.normalized ) z = normalize( z, this.array ); this.array[ index * this.itemSize + 2 ] = toHalfFloat( z ); return this; } getW( index ) { let w = fromHalfFloat( this.array[ index * this.itemSize + 3 ] ); if ( this.normalized ) w = denormalize( w, this.array ); return w; } setW( index, w ) { if ( this.normalized ) w = normalize( w, this.array ); this.array[ index * this.itemSize + 3 ] = toHalfFloat( w ); return this; } setXY( index, x, y ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); } this.array[ index + 0 ] = toHalfFloat( x ); this.array[ index + 1 ] = toHalfFloat( y ); return this; } setXYZ( index, x, y, z ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); } this.array[ index + 0 ] = toHalfFloat( x ); this.array[ index + 1 ] = toHalfFloat( y ); this.array[ index + 2 ] = toHalfFloat( z ); return this; } setXYZW( index, x, y, z, w ) { index *= this.itemSize; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); w = normalize( w, this.array ); } this.array[ index + 0 ] = toHalfFloat( x ); this.array[ index + 1 ] = toHalfFloat( y ); this.array[ index + 2 ] = toHalfFloat( z ); this.array[ index + 3 ] = toHalfFloat( w ); return this; } } /** * Convenient class that can be used when creating a `Float32` buffer attribute with * a plain `Array` instance. * * @augments BufferAttribute */ class Float32BufferAttribute extends BufferAttribute { /** * Constructs a new buffer attribute. * * @param {(Array|Float32Array)} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( array, itemSize, normalized ) { super( new Float32Array( array ), itemSize, normalized ); } } let _id$2 = 0; const _m1$2 = /*@__PURE__*/ new Matrix4(); const _obj = /*@__PURE__*/ new Object3D(); const _offset = /*@__PURE__*/ new Vector3(); const _box$2 = /*@__PURE__*/ new Box3(); const _boxMorphTargets = /*@__PURE__*/ new Box3(); const _vector$8 = /*@__PURE__*/ new Vector3(); /** * A representation of mesh, line, or point geometry. Includes vertex * positions, face indices, normals, colors, UVs, and custom attributes * within buffers, reducing the cost of passing all this data to the GPU. 
* * ```js * const geometry = new THREE.BufferGeometry(); * // create a simple square shape. We duplicate the top left and bottom right * // vertices because each vertex needs to appear once per triangle. * const vertices = new Float32Array( [ * -1.0, -1.0, 1.0, // v0 * 1.0, -1.0, 1.0, // v1 * 1.0, 1.0, 1.0, // v2 * * 1.0, 1.0, 1.0, // v3 * -1.0, 1.0, 1.0, // v4 * -1.0, -1.0, 1.0 // v5 * ] ); * // itemSize = 3 because there are 3 values (components) per vertex * geometry.setAttribute( 'position', new THREE.BufferAttribute( vertices, 3 ) ); * const material = new THREE.MeshBasicMaterial( { color: 0xff0000 } ); * const mesh = new THREE.Mesh( geometry, material ); * ``` * * @augments EventDispatcher */ class BufferGeometry extends EventDispatcher { /** * Constructs a new geometry. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBufferGeometry = true; /** * The ID of the geometry. * * @name BufferGeometry#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _id$2 ++ } ); /** * The UUID of the geometry. * * @type {string} * @readonly */ this.uuid = generateUUID(); /** * The name of the geometry. * * @type {string} */ this.name = ''; this.type = 'BufferGeometry'; /** * Allows for vertices to be re-used across multiple triangles; this is * called using "indexed triangles". Each triangle is associated with the * indices of three vertices. This attribute therefore stores the index of * each vertex for each triangular face. If this attribute is not set, the * renderer assumes that each three contiguous positions represent a single triangle. * * @type {?BufferAttribute} * @default null */ this.index = null; /** * A (storage) buffer attribute which was generated with a compute shader and * now defines indirect draw calls. * * Can only be used with {@link WebGPURenderer} and a WebGPU backend. * * @type {?BufferAttribute} * @default null */ this.indirect = null; /** * This dictionary maps the name of an attribute to the buffer attribute that stores its data. * Rather than accessing this property directly, * use `setAttribute()` and `getAttribute()` to access attributes of this geometry. * * @type {Object} */ this.attributes = {}; /** * This dictionary holds the morph targets of the geometry. * * Note: Once the geometry has been rendered, the morph attribute data cannot * be changed. You will have to call `dispose()` and create a new geometry instance. * * @type {Object} */ this.morphAttributes = {}; /** * Used to control the morph target behavior; when set to `true`, the morph * target data is treated as relative offsets, rather than as absolute * positions/normals. * * @type {boolean} * @default false */ this.morphTargetsRelative = false; /** * Split the geometry into groups, each of which will be rendered in a * separate draw call. This allows an array of materials to be used with the geometry. * * Use `addGroup()` and `clearGroups()` to edit groups, rather than modifying this array directly. * * Every vertex and index must belong to exactly one group; groups must not share vertices or * indices, and must not leave vertices or indices unused. * * @type {Array} */ this.groups = []; /** * Bounding box for the geometry which can be calculated with `computeBoundingBox()`. * * @type {Box3} * @default null */ this.boundingBox = null; /** * Bounding sphere for the geometry which can be calculated with `computeBoundingSphere()`. 
* * @type {Sphere} * @default null */ this.boundingSphere = null; /** * Determines the part of the geometry to render. This should not be set directly; * instead, use `setDrawRange()`. * * @type {{start:number,count:number}} */ this.drawRange = { start: 0, count: Infinity }; /** * An object that can be used to store custom data about the geometry. * It should not hold references to functions as these will not be cloned. * * @type {Object} */ this.userData = {}; } /** * Returns the index of this geometry. * * @return {?BufferAttribute} The index. Returns `null` if no index is defined. */ getIndex() { return this.index; } /** * Sets the given index to this geometry. * * @param {Array|BufferAttribute} index - The index to set. * @return {BufferGeometry} A reference to this instance. */ setIndex( index ) { if ( Array.isArray( index ) ) { this.index = new ( arrayNeedsUint32( index ) ? Uint32BufferAttribute : Uint16BufferAttribute )( index, 1 ); } else { this.index = index; } return this; } /** * Sets the given indirect attribute to this geometry. * * @param {BufferAttribute} indirect - The attribute holding indirect draw calls. * @return {BufferGeometry} A reference to this instance. */ setIndirect( indirect ) { this.indirect = indirect; return this; } /** * Returns the indirect attribute of this geometry. * * @return {?BufferAttribute} The indirect attribute. Returns `null` if no indirect attribute is defined. */ getIndirect() { return this.indirect; } /** * Returns the buffer attribute for the given name. * * @param {string} name - The attribute name. * @return {BufferAttribute|InterleavedBufferAttribute|undefined} The buffer attribute. * Returns `undefined` if no attribute has been found. */ getAttribute( name ) { return this.attributes[ name ]; } /** * Sets the given attribute for the given name. * * @param {string} name - The attribute name. * @param {BufferAttribute|InterleavedBufferAttribute} attribute - The attribute to set. * @return {BufferGeometry} A reference to this instance. */ setAttribute( name, attribute ) { this.attributes[ name ] = attribute; return this; } /** * Deletes the attribute for the given name. * * @param {string} name - The attribute name to delete. * @return {BufferGeometry} A reference to this instance. */ deleteAttribute( name ) { delete this.attributes[ name ]; return this; } /** * Returns `true` if this geometry has an attribute for the given name. * * @param {string} name - The attribute name. * @return {boolean} Whether this geometry has an attribute for the given name or not. */ hasAttribute( name ) { return this.attributes[ name ] !== undefined; } /** * Adds a group to this geometry. * * @param {number} start - The first element in this draw call. That is the first * vertex for non-indexed geometry, otherwise the first triangle index. * @param {number} count - Specifies how many vertices (or indices) are part of this group. * @param {number} [materialIndex=0] - The material array index to use. */ addGroup( start, count, materialIndex = 0 ) { this.groups.push( { start: start, count: count, materialIndex: materialIndex } ); } /** * Clears all groups. */ clearGroups() { this.groups = []; } /** * Sets the draw range for this geometry. * * @param {number} start - The first vertex for non-indexed geometry, otherwise the first triangle index. * @param {number} count - For non-indexed BufferGeometry, `count` is the number of vertices to render. * For indexed BufferGeometry, `count` is the number of indices to render. 
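 *
 * A minimal usage sketch (assuming an existing non-indexed `geometry` with at least 36 vertices):
 * ```js
 * // render only the first 12 triangles and ignore the rest of the buffer
 * geometry.setDrawRange( 0, 36 );
 * ```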
*/ setDrawRange( start, count ) { this.drawRange.start = start; this.drawRange.count = count; } /** * Applies the given 4x4 transformation matrix to the geometry. * * @param {Matrix4} matrix - The matrix to apply. * @return {BufferGeometry} A reference to this instance. */ applyMatrix4( matrix ) { const position = this.attributes.position; if ( position !== undefined ) { position.applyMatrix4( matrix ); position.needsUpdate = true; } const normal = this.attributes.normal; if ( normal !== undefined ) { const normalMatrix = new Matrix3().getNormalMatrix( matrix ); normal.applyNormalMatrix( normalMatrix ); normal.needsUpdate = true; } const tangent = this.attributes.tangent; if ( tangent !== undefined ) { tangent.transformDirection( matrix ); tangent.needsUpdate = true; } if ( this.boundingBox !== null ) { this.computeBoundingBox(); } if ( this.boundingSphere !== null ) { this.computeBoundingSphere(); } return this; } /** * Applies the rotation represented by the Quaternion to the geometry. * * @param {Quaternion} q - The Quaternion to apply. * @return {BufferGeometry} A reference to this instance. */ applyQuaternion( q ) { _m1$2.makeRotationFromQuaternion( q ); this.applyMatrix4( _m1$2 ); return this; } /** * Rotates the geometry about the X axis. This is typically done as a one time * operation, and not during a loop. Use {@link Object3D#rotation} for typical * real-time mesh rotation. * * @param {number} angle - The angle in radians. * @return {BufferGeometry} A reference to this instance. */ rotateX( angle ) { // rotate geometry around world x-axis _m1$2.makeRotationX( angle ); this.applyMatrix4( _m1$2 ); return this; } /** * Rotates the geometry about the Y axis. This is typically done as a one time * operation, and not during a loop. Use {@link Object3D#rotation} for typical * real-time mesh rotation. * * @param {number} angle - The angle in radians. * @return {BufferGeometry} A reference to this instance. */ rotateY( angle ) { // rotate geometry around world y-axis _m1$2.makeRotationY( angle ); this.applyMatrix4( _m1$2 ); return this; } /** * Rotates the geometry about the Z axis. This is typically done as a one time * operation, and not during a loop. Use {@link Object3D#rotation} for typical * real-time mesh rotation. * * @param {number} angle - The angle in radians. * @return {BufferGeometry} A reference to this instance. */ rotateZ( angle ) { // rotate geometry around world z-axis _m1$2.makeRotationZ( angle ); this.applyMatrix4( _m1$2 ); return this; } /** * Translates the geometry. This is typically done as a one time * operation, and not during a loop. Use {@link Object3D#position} for typical * real-time mesh translation. * * @param {number} x - The x offset. * @param {number} y - The y offset. * @param {number} z - The z offset. * @return {BufferGeometry} A reference to this instance. */ translate( x, y, z ) { // translate geometry _m1$2.makeTranslation( x, y, z ); this.applyMatrix4( _m1$2 ); return this; } /** * Scales the geometry. This is typically done as a one time * operation, and not during a loop. Use {@link Object3D#scale} for typical * real-time mesh scaling. * * @param {number} x - The x scale. * @param {number} y - The y scale. * @param {number} z - The z scale. * @return {BufferGeometry} A reference to this instance. */ scale( x, y, z ) { // scale geometry _m1$2.makeScale( x, y, z ); this.applyMatrix4( _m1$2 ); return this; } /** * Rotates the geometry to face a point in 3D space. This is typically done as a one time * operation, and not during a loop. 
Use {@link Object3D#lookAt} for typical * real-time mesh rotation. * * @param {Vector3} vector - The target point. * @return {BufferGeometry} A reference to this instance. */ lookAt( vector ) { _obj.lookAt( vector ); _obj.updateMatrix(); this.applyMatrix4( _obj.matrix ); return this; } /** * Center the geometry based on its bounding box. * * @return {BufferGeometry} A reference to this instance. */ center() { this.computeBoundingBox(); this.boundingBox.getCenter( _offset ).negate(); this.translate( _offset.x, _offset.y, _offset.z ); return this; } /** * Defines a geometry by creating a `position` attribute based on the given array of points. The array * can hold 2D or 3D vectors. When using two-dimensional data, the `z` coordinate for all vertices is * set to `0`. * * If the method is used with an existing `position` attribute, the vertex data are overwritten with the * data from the array. The length of the array must match the vertex count. * * @param {Array<Vector2>|Array<Vector3>} points - The points. * @return {BufferGeometry} A reference to this instance. */ setFromPoints( points ) { const positionAttribute = this.getAttribute( 'position' ); if ( positionAttribute === undefined ) { const position = []; for ( let i = 0, l = points.length; i < l; i ++ ) { const point = points[ i ]; position.push( point.x, point.y, point.z || 0 ); } this.setAttribute( 'position', new Float32BufferAttribute( position, 3 ) ); } else { const l = Math.min( points.length, positionAttribute.count ); // make sure data do not exceed buffer size for ( let i = 0; i < l; i ++ ) { const point = points[ i ]; positionAttribute.setXYZ( i, point.x, point.y, point.z || 0 ); } if ( points.length > positionAttribute.count ) { console.warn( 'THREE.BufferGeometry: Buffer size too small for points data. Use .dispose() and create a new geometry.' ); } positionAttribute.needsUpdate = true; } return this; } /** * Computes the bounding box of the geometry, and updates the `boundingBox` member. * The bounding box is not computed by the engine; it must be computed by your app. * You may need to recompute the bounding box if the geometry vertices are modified. 
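 *
 * For example, a sketch assuming a `geometry` whose `position` attribute was just edited:
 * ```js
 * geometry.attributes.position.needsUpdate = true;
 * geometry.computeBoundingBox(); // refresh the cached bounds
 * console.log( geometry.boundingBox.min, geometry.boundingBox.max );
 * ```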
*/ computeBoundingBox() { if ( this.boundingBox === null ) { this.boundingBox = new Box3(); } const position = this.attributes.position; const morphAttributesPosition = this.morphAttributes.position; if ( position && position.isGLBufferAttribute ) { console.error( 'THREE.BufferGeometry.computeBoundingBox(): GLBufferAttribute requires a manual bounding box.', this ); this.boundingBox.set( new Vector3( - Infinity, - Infinity, - Infinity ), new Vector3( + Infinity, + Infinity, + Infinity ) ); return; } if ( position !== undefined ) { this.boundingBox.setFromBufferAttribute( position ); // process morph attributes if present if ( morphAttributesPosition ) { for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) { const morphAttribute = morphAttributesPosition[ i ]; _box$2.setFromBufferAttribute( morphAttribute ); if ( this.morphTargetsRelative ) { _vector$8.addVectors( this.boundingBox.min, _box$2.min ); this.boundingBox.expandByPoint( _vector$8 ); _vector$8.addVectors( this.boundingBox.max, _box$2.max ); this.boundingBox.expandByPoint( _vector$8 ); } else { this.boundingBox.expandByPoint( _box$2.min ); this.boundingBox.expandByPoint( _box$2.max ); } } } } else { this.boundingBox.makeEmpty(); } if ( isNaN( this.boundingBox.min.x ) || isNaN( this.boundingBox.min.y ) || isNaN( this.boundingBox.min.z ) ) { console.error( 'THREE.BufferGeometry.computeBoundingBox(): Computed min/max have NaN values. The "position" attribute is likely to have NaN values.', this ); } } /** * Computes the bounding sphere of the geometry, and updates the `boundingSphere` member. * The engine automatically computes the bounding sphere when it is needed, e.g., for ray casting or view frustum culling. * You may need to recompute the bounding sphere if the geometry vertices are modified. 
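 *
 * A small usage sketch (hypothetical `geometry` variable); note that the renderer also computes
 * the sphere lazily when raycasting or frustum culling first needs it:
 * ```js
 * geometry.computeBoundingSphere();
 * const { center, radius } = geometry.boundingSphere;
 * console.log( 'fits inside a sphere of radius', radius, 'centered at', center );
 * ```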
*/ computeBoundingSphere() { if ( this.boundingSphere === null ) { this.boundingSphere = new Sphere(); } const position = this.attributes.position; const morphAttributesPosition = this.morphAttributes.position; if ( position && position.isGLBufferAttribute ) { console.error( 'THREE.BufferGeometry.computeBoundingSphere(): GLBufferAttribute requires a manual bounding sphere.', this ); this.boundingSphere.set( new Vector3(), Infinity ); return; } if ( position ) { // first, find the center of the bounding sphere const center = this.boundingSphere.center; _box$2.setFromBufferAttribute( position ); // process morph attributes if present if ( morphAttributesPosition ) { for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) { const morphAttribute = morphAttributesPosition[ i ]; _boxMorphTargets.setFromBufferAttribute( morphAttribute ); if ( this.morphTargetsRelative ) { _vector$8.addVectors( _box$2.min, _boxMorphTargets.min ); _box$2.expandByPoint( _vector$8 ); _vector$8.addVectors( _box$2.max, _boxMorphTargets.max ); _box$2.expandByPoint( _vector$8 ); } else { _box$2.expandByPoint( _boxMorphTargets.min ); _box$2.expandByPoint( _boxMorphTargets.max ); } } } _box$2.getCenter( center ); // second, try to find a boundingSphere with a radius smaller than the // boundingSphere of the boundingBox: sqrt(3) smaller in the best case let maxRadiusSq = 0; for ( let i = 0, il = position.count; i < il; i ++ ) { _vector$8.fromBufferAttribute( position, i ); maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( _vector$8 ) ); } // process morph attributes if present if ( morphAttributesPosition ) { for ( let i = 0, il = morphAttributesPosition.length; i < il; i ++ ) { const morphAttribute = morphAttributesPosition[ i ]; const morphTargetsRelative = this.morphTargetsRelative; for ( let j = 0, jl = morphAttribute.count; j < jl; j ++ ) { _vector$8.fromBufferAttribute( morphAttribute, j ); if ( morphTargetsRelative ) { _offset.fromBufferAttribute( position, j ); _vector$8.add( _offset ); } maxRadiusSq = Math.max( maxRadiusSq, center.distanceToSquared( _vector$8 ) ); } } } this.boundingSphere.radius = Math.sqrt( maxRadiusSq ); if ( isNaN( this.boundingSphere.radius ) ) { console.error( 'THREE.BufferGeometry.computeBoundingSphere(): Computed radius is NaN. The "position" attribute is likely to have NaN values.', this ); } } } /** * Calculates and adds a tangent attribute to this geometry. * * The computation is only supported for indexed geometries and if position, normal, and uv attributes * are defined. When using a tangent space normal map, prefer the MikkTSpace algorithm provided by * {@link BufferGeometryUtils#computeMikkTSpaceTangents} instead. */ computeTangents() { const index = this.index; const attributes = this.attributes; // based on http://www.terathon.com/code/tangent.html // (per vertex tangents) if ( index === null || attributes.position === undefined || attributes.normal === undefined || attributes.uv === undefined ) { console.error( 'THREE.BufferGeometry: .computeTangents() failed. 
Missing required attributes (index, position, normal or uv)' ); return; } const positionAttribute = attributes.position; const normalAttribute = attributes.normal; const uvAttribute = attributes.uv; if ( this.hasAttribute( 'tangent' ) === false ) { this.setAttribute( 'tangent', new BufferAttribute( new Float32Array( 4 * positionAttribute.count ), 4 ) ); } const tangentAttribute = this.getAttribute( 'tangent' ); const tan1 = [], tan2 = []; for ( let i = 0; i < positionAttribute.count; i ++ ) { tan1[ i ] = new Vector3(); tan2[ i ] = new Vector3(); } const vA = new Vector3(), vB = new Vector3(), vC = new Vector3(), uvA = new Vector2(), uvB = new Vector2(), uvC = new Vector2(), sdir = new Vector3(), tdir = new Vector3(); function handleTriangle( a, b, c ) { vA.fromBufferAttribute( positionAttribute, a ); vB.fromBufferAttribute( positionAttribute, b ); vC.fromBufferAttribute( positionAttribute, c ); uvA.fromBufferAttribute( uvAttribute, a ); uvB.fromBufferAttribute( uvAttribute, b ); uvC.fromBufferAttribute( uvAttribute, c ); vB.sub( vA ); vC.sub( vA ); uvB.sub( uvA ); uvC.sub( uvA ); const r = 1.0 / ( uvB.x * uvC.y - uvC.x * uvB.y ); // silently ignore degenerate uv triangles having coincident or colinear vertices if ( ! isFinite( r ) ) return; sdir.copy( vB ).multiplyScalar( uvC.y ).addScaledVector( vC, - uvB.y ).multiplyScalar( r ); tdir.copy( vC ).multiplyScalar( uvB.x ).addScaledVector( vB, - uvC.x ).multiplyScalar( r ); tan1[ a ].add( sdir ); tan1[ b ].add( sdir ); tan1[ c ].add( sdir ); tan2[ a ].add( tdir ); tan2[ b ].add( tdir ); tan2[ c ].add( tdir ); } let groups = this.groups; if ( groups.length === 0 ) { groups = [ { start: 0, count: index.count } ]; } for ( let i = 0, il = groups.length; i < il; ++ i ) { const group = groups[ i ]; const start = group.start; const count = group.count; for ( let j = start, jl = start + count; j < jl; j += 3 ) { handleTriangle( index.getX( j + 0 ), index.getX( j + 1 ), index.getX( j + 2 ) ); } } const tmp = new Vector3(), tmp2 = new Vector3(); const n = new Vector3(), n2 = new Vector3(); function handleVertex( v ) { n.fromBufferAttribute( normalAttribute, v ); n2.copy( n ); const t = tan1[ v ]; // Gram-Schmidt orthogonalize tmp.copy( t ); tmp.sub( n.multiplyScalar( n.dot( t ) ) ).normalize(); // Calculate handedness tmp2.crossVectors( n2, t ); const test = tmp2.dot( tan2[ v ] ); const w = ( test < 0.0 ) ? -1 : 1.0; tangentAttribute.setXYZW( v, tmp.x, tmp.y, tmp.z, w ); } for ( let i = 0, il = groups.length; i < il; ++ i ) { const group = groups[ i ]; const start = group.start; const count = group.count; for ( let j = start, jl = start + count; j < jl; j += 3 ) { handleVertex( index.getX( j + 0 ) ); handleVertex( index.getX( j + 1 ) ); handleVertex( index.getX( j + 2 ) ); } } } /** * Computes vertex normals for the given vertex data. For indexed geometries, the method sets * each vertex normal to be the average of the face normals of the faces that share that vertex. * For non-indexed geometries, vertices are not shared, and the method sets each vertex normal * to be the same as the face normal. 
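 *
 * A typical sketch (assuming a `geometry` that already has a `position` attribute but no normals):
 * ```js
 * geometry.computeVertexNormals();
 * const mesh = new THREE.Mesh( geometry, new THREE.MeshStandardMaterial() ); // lit material that uses the normals
 * ```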
*/ computeVertexNormals() { const index = this.index; const positionAttribute = this.getAttribute( 'position' ); if ( positionAttribute !== undefined ) { let normalAttribute = this.getAttribute( 'normal' ); if ( normalAttribute === undefined ) { normalAttribute = new BufferAttribute( new Float32Array( positionAttribute.count * 3 ), 3 ); this.setAttribute( 'normal', normalAttribute ); } else { // reset existing normals to zero for ( let i = 0, il = normalAttribute.count; i < il; i ++ ) { normalAttribute.setXYZ( i, 0, 0, 0 ); } } const pA = new Vector3(), pB = new Vector3(), pC = new Vector3(); const nA = new Vector3(), nB = new Vector3(), nC = new Vector3(); const cb = new Vector3(), ab = new Vector3(); // indexed elements if ( index ) { for ( let i = 0, il = index.count; i < il; i += 3 ) { const vA = index.getX( i + 0 ); const vB = index.getX( i + 1 ); const vC = index.getX( i + 2 ); pA.fromBufferAttribute( positionAttribute, vA ); pB.fromBufferAttribute( positionAttribute, vB ); pC.fromBufferAttribute( positionAttribute, vC ); cb.subVectors( pC, pB ); ab.subVectors( pA, pB ); cb.cross( ab ); nA.fromBufferAttribute( normalAttribute, vA ); nB.fromBufferAttribute( normalAttribute, vB ); nC.fromBufferAttribute( normalAttribute, vC ); nA.add( cb ); nB.add( cb ); nC.add( cb ); normalAttribute.setXYZ( vA, nA.x, nA.y, nA.z ); normalAttribute.setXYZ( vB, nB.x, nB.y, nB.z ); normalAttribute.setXYZ( vC, nC.x, nC.y, nC.z ); } } else { // non-indexed elements (unconnected triangle soup) for ( let i = 0, il = positionAttribute.count; i < il; i += 3 ) { pA.fromBufferAttribute( positionAttribute, i + 0 ); pB.fromBufferAttribute( positionAttribute, i + 1 ); pC.fromBufferAttribute( positionAttribute, i + 2 ); cb.subVectors( pC, pB ); ab.subVectors( pA, pB ); cb.cross( ab ); normalAttribute.setXYZ( i + 0, cb.x, cb.y, cb.z ); normalAttribute.setXYZ( i + 1, cb.x, cb.y, cb.z ); normalAttribute.setXYZ( i + 2, cb.x, cb.y, cb.z ); } } this.normalizeNormals(); normalAttribute.needsUpdate = true; } } /** * Ensures every normal vector in a geometry will have a magnitude of `1`. This will * correct lighting on the geometry surfaces. */ normalizeNormals() { const normals = this.attributes.normal; for ( let i = 0, il = normals.count; i < il; i ++ ) { _vector$8.fromBufferAttribute( normals, i ); _vector$8.normalize(); normals.setXYZ( i, _vector$8.x, _vector$8.y, _vector$8.z ); } } /** * Return a new non-index version of this indexed geometry. If the geometry * is already non-indexed, the method is a NOOP. * * @return {BufferGeometry} The non-indexed version of this indexed geometry. */ toNonIndexed() { function convertBufferAttribute( attribute, indices ) { const array = attribute.array; const itemSize = attribute.itemSize; const normalized = attribute.normalized; const array2 = new array.constructor( indices.length * itemSize ); let index = 0, index2 = 0; for ( let i = 0, l = indices.length; i < l; i ++ ) { if ( attribute.isInterleavedBufferAttribute ) { index = indices[ i ] * attribute.data.stride + attribute.offset; } else { index = indices[ i ] * itemSize; } for ( let j = 0; j < itemSize; j ++ ) { array2[ index2 ++ ] = array[ index ++ ]; } } return new BufferAttribute( array2, itemSize, normalized ); } // if ( this.index === null ) { console.warn( 'THREE.BufferGeometry.toNonIndexed(): BufferGeometry is already non-indexed.' 
); return this; } const geometry2 = new BufferGeometry(); const indices = this.index.array; const attributes = this.attributes; // attributes for ( const name in attributes ) { const attribute = attributes[ name ]; const newAttribute = convertBufferAttribute( attribute, indices ); geometry2.setAttribute( name, newAttribute ); } // morph attributes const morphAttributes = this.morphAttributes; for ( const name in morphAttributes ) { const morphArray = []; const morphAttribute = morphAttributes[ name ]; // morphAttribute: array of Float32BufferAttributes for ( let i = 0, il = morphAttribute.length; i < il; i ++ ) { const attribute = morphAttribute[ i ]; const newAttribute = convertBufferAttribute( attribute, indices ); morphArray.push( newAttribute ); } geometry2.morphAttributes[ name ] = morphArray; } geometry2.morphTargetsRelative = this.morphTargetsRelative; // groups const groups = this.groups; for ( let i = 0, l = groups.length; i < l; i ++ ) { const group = groups[ i ]; geometry2.addGroup( group.start, group.count, group.materialIndex ); } return geometry2; } /** * Serializes the geometry into JSON. * * @return {Object} A JSON object representing the serialized geometry. */ toJSON() { const data = { metadata: { version: 4.6, type: 'BufferGeometry', generator: 'BufferGeometry.toJSON' } }; // standard BufferGeometry serialization data.uuid = this.uuid; data.type = this.type; if ( this.name !== '' ) data.name = this.name; if ( Object.keys( this.userData ).length > 0 ) data.userData = this.userData; if ( this.parameters !== undefined ) { const parameters = this.parameters; for ( const key in parameters ) { if ( parameters[ key ] !== undefined ) data[ key ] = parameters[ key ]; } return data; } // for simplicity the code assumes attributes are not shared across geometries, see #15811 data.data = { attributes: {} }; const index = this.index; if ( index !== null ) { data.data.index = { type: index.array.constructor.name, array: Array.prototype.slice.call( index.array ) }; } const attributes = this.attributes; for ( const key in attributes ) { const attribute = attributes[ key ]; data.data.attributes[ key ] = attribute.toJSON( data.data ); } const morphAttributes = {}; let hasMorphAttributes = false; for ( const key in this.morphAttributes ) { const attributeArray = this.morphAttributes[ key ]; const array = []; for ( let i = 0, il = attributeArray.length; i < il; i ++ ) { const attribute = attributeArray[ i ]; array.push( attribute.toJSON( data.data ) ); } if ( array.length > 0 ) { morphAttributes[ key ] = array; hasMorphAttributes = true; } } if ( hasMorphAttributes ) { data.data.morphAttributes = morphAttributes; data.data.morphTargetsRelative = this.morphTargetsRelative; } const groups = this.groups; if ( groups.length > 0 ) { data.data.groups = JSON.parse( JSON.stringify( groups ) ); } const boundingSphere = this.boundingSphere; if ( boundingSphere !== null ) { data.data.boundingSphere = { center: boundingSphere.center.toArray(), radius: boundingSphere.radius }; } return data; } /** * Returns a new geometry with copied values from this instance. * * @return {BufferGeometry} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given geometry to this instance. * * @param {BufferGeometry} source - The geometry to copy. * @return {BufferGeometry} A reference to this instance. 
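 *
 * A brief sketch (hypothetical `source` geometry):
 * ```js
 * const backup = new THREE.BufferGeometry().copy( source );
 * const backup2 = source.clone(); // equivalent shortcut
 * ```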
*/ copy( source ) { // reset this.index = null; this.attributes = {}; this.morphAttributes = {}; this.groups = []; this.boundingBox = null; this.boundingSphere = null; // used for storing cloned, shared data const data = {}; // name this.name = source.name; // index const index = source.index; if ( index !== null ) { this.setIndex( index.clone() ); } // attributes const attributes = source.attributes; for ( const name in attributes ) { const attribute = attributes[ name ]; this.setAttribute( name, attribute.clone( data ) ); } // morph attributes const morphAttributes = source.morphAttributes; for ( const name in morphAttributes ) { const array = []; const morphAttribute = morphAttributes[ name ]; // morphAttribute: array of Float32BufferAttributes for ( let i = 0, l = morphAttribute.length; i < l; i ++ ) { array.push( morphAttribute[ i ].clone( data ) ); } this.morphAttributes[ name ] = array; } this.morphTargetsRelative = source.morphTargetsRelative; // groups const groups = source.groups; for ( let i = 0, l = groups.length; i < l; i ++ ) { const group = groups[ i ]; this.addGroup( group.start, group.count, group.materialIndex ); } // bounding box const boundingBox = source.boundingBox; if ( boundingBox !== null ) { this.boundingBox = boundingBox.clone(); } // bounding sphere const boundingSphere = source.boundingSphere; if ( boundingSphere !== null ) { this.boundingSphere = boundingSphere.clone(); } // draw range this.drawRange.start = source.drawRange.start; this.drawRange.count = source.drawRange.count; // user data this.userData = source.userData; return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. * * @fires BufferGeometry#dispose */ dispose() { this.dispatchEvent( { type: 'dispose' } ); } } const _inverseMatrix$3 = /*@__PURE__*/ new Matrix4(); const _ray$3 = /*@__PURE__*/ new Ray(); const _sphere$6 = /*@__PURE__*/ new Sphere(); const _sphereHitAt = /*@__PURE__*/ new Vector3(); const _vA$1 = /*@__PURE__*/ new Vector3(); const _vB$1 = /*@__PURE__*/ new Vector3(); const _vC$1 = /*@__PURE__*/ new Vector3(); const _tempA = /*@__PURE__*/ new Vector3(); const _morphA = /*@__PURE__*/ new Vector3(); const _intersectionPoint = /*@__PURE__*/ new Vector3(); const _intersectionPointWorld = /*@__PURE__*/ new Vector3(); /** * Class representing triangular polygon mesh based objects. * * ```js * const geometry = new THREE.BoxGeometry( 1, 1, 1 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const mesh = new THREE.Mesh( geometry, material ); * scene.add( mesh ); * ``` * * @augments Object3D */ class Mesh extends Object3D { /** * Constructs a new mesh. * * @param {BufferGeometry} [geometry] - The mesh geometry. * @param {Material|Array} [material] - The mesh material. */ constructor( geometry = new BufferGeometry(), material = new MeshBasicMaterial() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMesh = true; this.type = 'Mesh'; /** * The mesh geometry. * * @type {BufferGeometry} */ this.geometry = geometry; /** * The mesh material. * * @type {Material|Array} * @default MeshBasicMaterial */ this.material = material; /** * A dictionary representing the morph targets in the geometry. The key is the * morph targets name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. 
* * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; /** * An array of weights typically in the range `[0,1]` that specify how much of the morph * is applied. This member is `undefined` by default and only set when morph targets are * detected in the geometry. * * @type {Array|undefined} * @default undefined */ this.morphTargetInfluences = undefined; this.updateMorphTargets(); } copy( source, recursive ) { super.copy( source, recursive ); if ( source.morphTargetInfluences !== undefined ) { this.morphTargetInfluences = source.morphTargetInfluences.slice(); } if ( source.morphTargetDictionary !== undefined ) { this.morphTargetDictionary = Object.assign( {}, source.morphTargetDictionary ); } this.material = Array.isArray( source.material ) ? source.material.slice() : source.material; this.geometry = source.geometry; return this; } /** * Sets the values of {@link Mesh#morphTargetDictionary} and {@link Mesh#morphTargetInfluences} * to make sure existing morph targets can influence this 3D object. */ updateMorphTargets() { const geometry = this.geometry; const morphAttributes = geometry.morphAttributes; const keys = Object.keys( morphAttributes ); if ( keys.length > 0 ) { const morphAttribute = morphAttributes[ keys[ 0 ] ]; if ( morphAttribute !== undefined ) { this.morphTargetInfluences = []; this.morphTargetDictionary = {}; for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) { const name = morphAttribute[ m ].name || String( m ); this.morphTargetInfluences.push( 0 ); this.morphTargetDictionary[ name ] = m; } } } } /** * Returns the local-space position of the vertex at the given index, taking into * account the current animation state of both morph targets and skinning. * * @param {number} index - The vertex index. * @param {Vector3} target - The target object that is used to store the method's result. * @return {Vector3} The vertex position in local space. */ getVertexPosition( index, target ) { const geometry = this.geometry; const position = geometry.attributes.position; const morphPosition = geometry.morphAttributes.position; const morphTargetsRelative = geometry.morphTargetsRelative; target.fromBufferAttribute( position, index ); const morphInfluences = this.morphTargetInfluences; if ( morphPosition && morphInfluences ) { _morphA.set( 0, 0, 0 ); for ( let i = 0, il = morphPosition.length; i < il; i ++ ) { const influence = morphInfluences[ i ]; const morphAttribute = morphPosition[ i ]; if ( influence === 0 ) continue; _tempA.fromBufferAttribute( morphAttribute, index ); if ( morphTargetsRelative ) { _morphA.addScaledVector( _tempA, influence ); } else { _morphA.addScaledVector( _tempA.sub( target ), influence ); } } target.add( _morphA ); } return target; } /** * Computes intersection points between a cast ray and this mesh. * * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - The target array that holds the intersection points. 
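 *
 * This method is usually not called directly; a common sketch uses a `Raycaster`
 * (assuming existing `camera`, `scene` and a pointer position in normalized device coordinates):
 * ```js
 * const raycaster = new THREE.Raycaster();
 * raycaster.setFromCamera( pointer, camera );
 * const hits = raycaster.intersectObjects( scene.children, true );
 * if ( hits.length > 0 ) console.log( 'closest hit at distance', hits[ 0 ].distance );
 * ```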
*/ raycast( raycaster, intersects ) { const geometry = this.geometry; const material = this.material; const matrixWorld = this.matrixWorld; if ( material === undefined ) return; // test with bounding sphere in world space if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere(); _sphere$6.copy( geometry.boundingSphere ); _sphere$6.applyMatrix4( matrixWorld ); // check distance from ray origin to bounding sphere _ray$3.copy( raycaster.ray ).recast( raycaster.near ); if ( _sphere$6.containsPoint( _ray$3.origin ) === false ) { if ( _ray$3.intersectSphere( _sphere$6, _sphereHitAt ) === null ) return; if ( _ray$3.origin.distanceToSquared( _sphereHitAt ) > ( raycaster.far - raycaster.near ) ** 2 ) return; } // convert ray to local space of mesh _inverseMatrix$3.copy( matrixWorld ).invert(); _ray$3.copy( raycaster.ray ).applyMatrix4( _inverseMatrix$3 ); // test with bounding box in local space if ( geometry.boundingBox !== null ) { if ( _ray$3.intersectsBox( geometry.boundingBox ) === false ) return; } // test for intersections with geometry this._computeIntersections( raycaster, intersects, _ray$3 ); } _computeIntersections( raycaster, intersects, rayLocalSpace ) { let intersection; const geometry = this.geometry; const material = this.material; const index = geometry.index; const position = geometry.attributes.position; const uv = geometry.attributes.uv; const uv1 = geometry.attributes.uv1; const normal = geometry.attributes.normal; const groups = geometry.groups; const drawRange = geometry.drawRange; if ( index !== null ) { // indexed buffer geometry if ( Array.isArray( material ) ) { for ( let i = 0, il = groups.length; i < il; i ++ ) { const group = groups[ i ]; const groupMaterial = material[ group.materialIndex ]; const start = Math.max( group.start, drawRange.start ); const end = Math.min( index.count, Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) ) ); for ( let j = start, jl = end; j < jl; j += 3 ) { const a = index.getX( j ); const b = index.getX( j + 1 ); const c = index.getX( j + 2 ); intersection = checkGeometryIntersection( this, groupMaterial, raycaster, rayLocalSpace, uv, uv1, normal, a, b, c ); if ( intersection ) { intersection.faceIndex = Math.floor( j / 3 ); // triangle number in indexed buffer semantics intersection.face.materialIndex = group.materialIndex; intersects.push( intersection ); } } } } else { const start = Math.max( 0, drawRange.start ); const end = Math.min( index.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, il = end; i < il; i += 3 ) { const a = index.getX( i ); const b = index.getX( i + 1 ); const c = index.getX( i + 2 ); intersection = checkGeometryIntersection( this, material, raycaster, rayLocalSpace, uv, uv1, normal, a, b, c ); if ( intersection ) { intersection.faceIndex = Math.floor( i / 3 ); // triangle number in indexed buffer semantics intersects.push( intersection ); } } } } else if ( position !== undefined ) { // non-indexed buffer geometry if ( Array.isArray( material ) ) { for ( let i = 0, il = groups.length; i < il; i ++ ) { const group = groups[ i ]; const groupMaterial = material[ group.materialIndex ]; const start = Math.max( group.start, drawRange.start ); const end = Math.min( position.count, Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) ) ); for ( let j = start, jl = end; j < jl; j += 3 ) { const a = j; const b = j + 1; const c = j + 2; intersection = checkGeometryIntersection( this, groupMaterial, raycaster, rayLocalSpace, uv, uv1, 
normal, a, b, c ); if ( intersection ) { intersection.faceIndex = Math.floor( j / 3 ); // triangle number in non-indexed buffer semantics intersection.face.materialIndex = group.materialIndex; intersects.push( intersection ); } } } } else { const start = Math.max( 0, drawRange.start ); const end = Math.min( position.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, il = end; i < il; i += 3 ) { const a = i; const b = i + 1; const c = i + 2; intersection = checkGeometryIntersection( this, material, raycaster, rayLocalSpace, uv, uv1, normal, a, b, c ); if ( intersection ) { intersection.faceIndex = Math.floor( i / 3 ); // triangle number in non-indexed buffer semantics intersects.push( intersection ); } } } } } } function checkIntersection$1( object, material, raycaster, ray, pA, pB, pC, point ) { let intersect; if ( material.side === BackSide ) { intersect = ray.intersectTriangle( pC, pB, pA, true, point ); } else { intersect = ray.intersectTriangle( pA, pB, pC, ( material.side === FrontSide ), point ); } if ( intersect === null ) return null; _intersectionPointWorld.copy( point ); _intersectionPointWorld.applyMatrix4( object.matrixWorld ); const distance = raycaster.ray.origin.distanceTo( _intersectionPointWorld ); if ( distance < raycaster.near || distance > raycaster.far ) return null; return { distance: distance, point: _intersectionPointWorld.clone(), object: object }; } function checkGeometryIntersection( object, material, raycaster, ray, uv, uv1, normal, a, b, c ) { object.getVertexPosition( a, _vA$1 ); object.getVertexPosition( b, _vB$1 ); object.getVertexPosition( c, _vC$1 ); const intersection = checkIntersection$1( object, material, raycaster, ray, _vA$1, _vB$1, _vC$1, _intersectionPoint ); if ( intersection ) { const barycoord = new Vector3(); Triangle.getBarycoord( _intersectionPoint, _vA$1, _vB$1, _vC$1, barycoord ); if ( uv ) { intersection.uv = Triangle.getInterpolatedAttribute( uv, a, b, c, barycoord, new Vector2() ); } if ( uv1 ) { intersection.uv1 = Triangle.getInterpolatedAttribute( uv1, a, b, c, barycoord, new Vector2() ); } if ( normal ) { intersection.normal = Triangle.getInterpolatedAttribute( normal, a, b, c, barycoord, new Vector3() ); if ( intersection.normal.dot( ray.direction ) > 0 ) { intersection.normal.multiplyScalar( -1 ); } } const face = { a: a, b: b, c: c, normal: new Vector3(), materialIndex: 0 }; Triangle.getNormal( _vA$1, _vB$1, _vC$1, face.normal ); intersection.face = face; intersection.barycoord = barycoord; } return intersection; } /** * A geometry class for a rectangular cuboid with a given width, height, and depth. * On creation, the cuboid is centred on the origin, with each edge parallel to one * of the axes. * * ```js * const geometry = new THREE.BoxGeometry( 1, 1, 1 ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } ); * const cube = new THREE.Mesh( geometry, material ); * scene.add( cube ); * ``` * * @augments BufferGeometry */ class BoxGeometry extends BufferGeometry { /** * Constructs a new box geometry. * * @param {number} [width=1] - The width. That is, the length of the edges parallel to the X axis. * @param {number} [height=1] - The height. That is, the length of the edges parallel to the Y axis. * @param {number} [depth=1] - The depth. That is, the length of the edges parallel to the Z axis. * @param {number} [widthSegments=1] - Number of segmented rectangular faces along the width of the sides. 
* @param {number} [heightSegments=1] - Number of segmented rectangular faces along the height of the sides. * @param {number} [depthSegments=1] - Number of segmented rectangular faces along the depth of the sides. */ constructor( width = 1, height = 1, depth = 1, widthSegments = 1, heightSegments = 1, depthSegments = 1 ) { super(); this.type = 'BoxGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { width: width, height: height, depth: depth, widthSegments: widthSegments, heightSegments: heightSegments, depthSegments: depthSegments }; const scope = this; // segments widthSegments = Math.floor( widthSegments ); heightSegments = Math.floor( heightSegments ); depthSegments = Math.floor( depthSegments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables let numberOfVertices = 0; let groupStart = 0; // build each side of the box geometry buildPlane( 'z', 'y', 'x', -1, -1, depth, height, width, depthSegments, heightSegments, 0 ); // px buildPlane( 'z', 'y', 'x', 1, -1, depth, height, - width, depthSegments, heightSegments, 1 ); // nx buildPlane( 'x', 'z', 'y', 1, 1, width, depth, height, widthSegments, depthSegments, 2 ); // py buildPlane( 'x', 'z', 'y', 1, -1, width, depth, - height, widthSegments, depthSegments, 3 ); // ny buildPlane( 'x', 'y', 'z', 1, -1, width, height, depth, widthSegments, heightSegments, 4 ); // pz buildPlane( 'x', 'y', 'z', -1, -1, width, height, - depth, widthSegments, heightSegments, 5 ); // nz // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); function buildPlane( u, v, w, udir, vdir, width, height, depth, gridX, gridY, materialIndex ) { const segmentWidth = width / gridX; const segmentHeight = height / gridY; const widthHalf = width / 2; const heightHalf = height / 2; const depthHalf = depth / 2; const gridX1 = gridX + 1; const gridY1 = gridY + 1; let vertexCounter = 0; let groupCount = 0; const vector = new Vector3(); // generate vertices, normals and uvs for ( let iy = 0; iy < gridY1; iy ++ ) { const y = iy * segmentHeight - heightHalf; for ( let ix = 0; ix < gridX1; ix ++ ) { const x = ix * segmentWidth - widthHalf; // set values to correct vector component vector[ u ] = x * udir; vector[ v ] = y * vdir; vector[ w ] = depthHalf; // now apply vector to vertex buffer vertices.push( vector.x, vector.y, vector.z ); // set values to correct vector component vector[ u ] = 0; vector[ v ] = 0; vector[ w ] = depth > 0 ? 1 : -1; // now apply vector to normal buffer normals.push( vector.x, vector.y, vector.z ); // uvs uvs.push( ix / gridX ); uvs.push( 1 - ( iy / gridY ) ); // counters vertexCounter += 1; } } // indices // 1. you need three indices to draw a single face // 2. a single segment consists of two faces // 3. 
so we need to generate six (2*3) indices per segment for ( let iy = 0; iy < gridY; iy ++ ) { for ( let ix = 0; ix < gridX; ix ++ ) { const a = numberOfVertices + ix + gridX1 * iy; const b = numberOfVertices + ix + gridX1 * ( iy + 1 ); const c = numberOfVertices + ( ix + 1 ) + gridX1 * ( iy + 1 ); const d = numberOfVertices + ( ix + 1 ) + gridX1 * iy; // faces indices.push( a, b, d ); indices.push( b, c, d ); // increase counter groupCount += 6; } } // add a group to the geometry. this will ensure multi material support scope.addGroup( groupStart, groupCount, materialIndex ); // calculate new start value for groups groupStart += groupCount; // update total number of vertices numberOfVertices += vertexCounter; } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {BoxGeometry} A new instance. */ static fromJSON( data ) { return new BoxGeometry( data.width, data.height, data.depth, data.widthSegments, data.heightSegments, data.depthSegments ); } } // Uniform Utilities function cloneUniforms( src ) { const dst = {}; for ( const u in src ) { dst[ u ] = {}; for ( const p in src[ u ] ) { const property = src[ u ][ p ]; if ( property && ( property.isColor || property.isMatrix3 || property.isMatrix4 || property.isVector2 || property.isVector3 || property.isVector4 || property.isTexture || property.isQuaternion ) ) { if ( property.isRenderTargetTexture ) { console.warn( 'UniformsUtils: Textures of render targets cannot be cloned via cloneUniforms() or mergeUniforms().' ); dst[ u ][ p ] = null; } else { dst[ u ][ p ] = property.clone(); } } else if ( Array.isArray( property ) ) { dst[ u ][ p ] = property.slice(); } else { dst[ u ][ p ] = property; } } } return dst; } function mergeUniforms( uniforms ) { const merged = {}; for ( let u = 0; u < uniforms.length; u ++ ) { const tmp = cloneUniforms( uniforms[ u ] ); for ( const p in tmp ) { merged[ p ] = tmp[ p ]; } } return merged; } function cloneUniformsGroups( src ) { const dst = []; for ( let u = 0; u < src.length; u ++ ) { dst.push( src[ u ].clone() ); } return dst; } function getUnlitUniformColorSpace( renderer ) { const currentRenderTarget = renderer.getRenderTarget(); if ( currentRenderTarget === null ) { // https://github.com/mrdoob/three.js/pull/23937#issuecomment-1111067398 return renderer.outputColorSpace; } // https://github.com/mrdoob/three.js/issues/27868 if ( currentRenderTarget.isXRRenderTarget === true ) { return currentRenderTarget.texture.colorSpace; } return ColorManagement.workingColorSpace; } // Legacy const UniformsUtils = { clone: cloneUniforms, merge: mergeUniforms }; var default_vertex = "void main() {\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n}"; var default_fragment = "void main() {\n\tgl_FragColor = vec4( 1.0, 0.0, 0.0, 1.0 );\n}"; /** * A material rendered with custom shaders. A shader is a small program written in GLSL * that runs on the GPU. You may want to use a custom shader if you need to implement an * effect not included with any of the built-in materials. * * There are the following notes to bear in mind when using a `ShaderMaterial`: * * - `ShaderMaterial` can only be used with {@link WebGLRenderer}. * - Built-in attributes and uniforms are passed to the shaders along with your code. 
If * you don't want that, use {@link RawShaderMaterial} instead. * - You can use the directives `#pragma unroll_loop_start` and `#pragma unroll_loop_end` * in order to unroll a `for` loop in GLSL by the shader preprocessor. The directive has * to be placed right above the loop. The loop formatting has to correspond to a defined standard. * - The loop has to be [normalized]{@link https://en.wikipedia.org/wiki/Normalized_loop}. * - The loop variable has to be *i*. * - The value `UNROLLED_LOOP_INDEX` will be replaced with the explicit * value of *i* for the given iteration and can be used in preprocessor * statements. * * ```js * const material = new THREE.ShaderMaterial( { * uniforms: { * time: { value: 1.0 }, * resolution: { value: new THREE.Vector2() } * }, * vertexShader: document.getElementById( 'vertexShader' ).textContent, * fragmentShader: document.getElementById( 'fragmentShader' ).textContent * } ); * ``` * * @augments Material */ class ShaderMaterial extends Material { /** * Constructs a new shader material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed as any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isShaderMaterial = true; this.type = 'ShaderMaterial'; /** * Defines custom constants using `#define` directives within the GLSL code * for both the vertex shader and the fragment shader; each key/value pair * yields another directive. * ```js * defines: { * FOO: 15, * BAR: true * } * ``` * Yields the lines: * ``` * #define FOO 15 * #define BAR true * ``` * * @type {Object} */ this.defines = {}; /** * An object of the form: * ```js * { * "uniform1": { value: 1.0 }, * "uniform2": { value: 2 } * } * ``` * specifying the uniforms to be passed to the shader code; keys are uniform * names, values are definitions of the form * ``` * { * value: 1.0 * } * ``` * where `value` is the value of the uniform. Names must match the name of * the uniform, as defined in the GLSL code. Note that uniforms are refreshed * on every frame, so updating the value of the uniform will immediately * update the value available to the GLSL code. * * @type {Object} */ this.uniforms = {}; /** * An array holding uniforms groups for configuring UBOs. * * @type {Array} */ this.uniformsGroups = []; /** * Vertex shader GLSL code. This is the actual code for the shader. * * @type {string} */ this.vertexShader = default_vertex; /** * Fragment shader GLSL code. This is the actual code for the shader. * * @type {string} */ this.fragmentShader = default_fragment; /** * Controls the thickness of lines. * * WebGL and WebGPU ignore this setting and always render line primitives with a * width of one pixel. * * @type {number} * @default 1 */ this.linewidth = 1; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * WebGL and WebGPU ignore this property and always render * 1 pixel wide lines. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines whether the material color is affected by global fog settings; `true` * to pass fog uniforms to the shader. 
* * @type {boolean} * @default false */ this.fog = false; /** * Defines whether this material uses lighting; `true` to pass uniform data * related to lighting to this shader. * * @type {boolean} * @default false */ this.lights = false; /** * Defines whether this material supports clipping; `true` to let the renderer * pass the clippingPlanes uniform. * * @type {boolean} * @default false */ this.clipping = false; /** * Overwritten and set to `true` by default. * * @type {boolean} * @default true */ this.forceSinglePass = true; /** * This object allows to enable certain WebGL 2 extensions. * * - clipCullDistance: set to `true` to use vertex shader clipping * - multiDraw: set to `true` to use vertex shader multi_draw / enable gl_DrawID * * @type {{clipCullDistance:false,multiDraw:false}} */ this.extensions = { clipCullDistance: false, // set to use vertex shader clipping multiDraw: false // set to use vertex shader multi_draw / enable gl_DrawID }; /** * When the rendered geometry doesn't include these attributes but the * material does, these default values will be passed to the shaders. This * avoids errors when buffer data is missing. * * - color: [ 1, 1, 1 ] * - uv: [ 0, 0 ] * - uv1: [ 0, 0 ] * * @type {Object} */ this.defaultAttributeValues = { 'color': [ 1, 1, 1 ], 'uv': [ 0, 0 ], 'uv1': [ 0, 0 ] }; /** * If set, this calls [gl.bindAttribLocation]{@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/bindAttribLocation} * to bind a generic vertex index to an attribute variable. * * @type {string|undefined} * @default undefined */ this.index0AttributeName = undefined; /** * Can be used to force a uniform update while changing uniforms in * {@link Object3D#onBeforeRender}. * * @type {boolean} * @default false */ this.uniformsNeedUpdate = false; /** * Defines the GLSL version of custom shader code. 
* * @type {?(GLSL1|GLSL3)} * @default null */ this.glslVersion = null; if ( parameters !== undefined ) { this.setValues( parameters ); } } copy( source ) { super.copy( source ); this.fragmentShader = source.fragmentShader; this.vertexShader = source.vertexShader; this.uniforms = cloneUniforms( source.uniforms ); this.uniformsGroups = cloneUniformsGroups( source.uniformsGroups ); this.defines = Object.assign( {}, source.defines ); this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.fog = source.fog; this.lights = source.lights; this.clipping = source.clipping; this.extensions = Object.assign( {}, source.extensions ); this.glslVersion = source.glslVersion; return this; } toJSON( meta ) { const data = super.toJSON( meta ); data.glslVersion = this.glslVersion; data.uniforms = {}; for ( const name in this.uniforms ) { const uniform = this.uniforms[ name ]; const value = uniform.value; if ( value && value.isTexture ) { data.uniforms[ name ] = { type: 't', value: value.toJSON( meta ).uuid }; } else if ( value && value.isColor ) { data.uniforms[ name ] = { type: 'c', value: value.getHex() }; } else if ( value && value.isVector2 ) { data.uniforms[ name ] = { type: 'v2', value: value.toArray() }; } else if ( value && value.isVector3 ) { data.uniforms[ name ] = { type: 'v3', value: value.toArray() }; } else if ( value && value.isVector4 ) { data.uniforms[ name ] = { type: 'v4', value: value.toArray() }; } else if ( value && value.isMatrix3 ) { data.uniforms[ name ] = { type: 'm3', value: value.toArray() }; } else if ( value && value.isMatrix4 ) { data.uniforms[ name ] = { type: 'm4', value: value.toArray() }; } else { data.uniforms[ name ] = { value: value }; // note: the array variants v2v, v3v, v4v, m4v and tv are not supported so far } } if ( Object.keys( this.defines ).length > 0 ) data.defines = this.defines; data.vertexShader = this.vertexShader; data.fragmentShader = this.fragmentShader; data.lights = this.lights; data.clipping = this.clipping; const extensions = {}; for ( const key in this.extensions ) { if ( this.extensions[ key ] === true ) extensions[ key ] = true; } if ( Object.keys( extensions ).length > 0 ) data.extensions = extensions; return data; } } /** * Abstract base class for cameras. This class should always be inherited * when you build a new camera. * * @abstract * @augments Object3D */ class Camera extends Object3D { /** * Constructs a new camera. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCamera = true; this.type = 'Camera'; /** * The inverse of the camera's world matrix. * * @type {Matrix4} */ this.matrixWorldInverse = new Matrix4(); /** * The camera's projection matrix. * * @type {Matrix4} */ this.projectionMatrix = new Matrix4(); /** * The inverse of the camera's projection matrix. * * @type {Matrix4} */ this.projectionMatrixInverse = new Matrix4(); /** * The coordinate system in which the camera is used. * * @type {(WebGLCoordinateSystem|WebGPUCoordinateSystem)} */ this.coordinateSystem = WebGLCoordinateSystem; } copy( source, recursive ) { super.copy( source, recursive ); this.matrixWorldInverse.copy( source.matrixWorldInverse ); this.projectionMatrix.copy( source.projectionMatrix ); this.projectionMatrixInverse.copy( source.projectionMatrixInverse ); this.coordinateSystem = source.coordinateSystem; return this; } /** * Returns a vector representing the ("look") direction of the 3D object in world space. 
* * This method is overwritten since cameras have a different forward vector compared to other * 3D objects. A camera looks down its local, negative z-axis by default. * * @param {Vector3} target - The target vector the result is stored to. * @return {Vector3} The 3D object's direction in world space. */ getWorldDirection( target ) { return super.getWorldDirection( target ).negate(); } updateMatrixWorld( force ) { super.updateMatrixWorld( force ); this.matrixWorldInverse.copy( this.matrixWorld ).invert(); } updateWorldMatrix( updateParents, updateChildren ) { super.updateWorldMatrix( updateParents, updateChildren ); this.matrixWorldInverse.copy( this.matrixWorld ).invert(); } clone() { return new this.constructor().copy( this ); } } const _v3$1 = /*@__PURE__*/ new Vector3(); const _minTarget = /*@__PURE__*/ new Vector2(); const _maxTarget = /*@__PURE__*/ new Vector2(); /** * Camera that uses [perspective projection]{@link https://en.wikipedia.org/wiki/Perspective_(graphical)}. * * This projection mode is designed to mimic the way the human eye sees. It * is the most common projection mode used for rendering a 3D scene. * * ```js * const camera = new THREE.PerspectiveCamera( 45, width / height, 1, 1000 ); * scene.add( camera ); * ``` * * @augments Camera */ class PerspectiveCamera extends Camera { /** * Constructs a new perspective camera. * * @param {number} [fov=50] - The vertical field of view. * @param {number} [aspect=1] - The aspect ratio. * @param {number} [near=0.1] - The camera's near plane. * @param {number} [far=2000] - The camera's far plane. */ constructor( fov = 50, aspect = 1, near = 0.1, far = 2000 ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isPerspectiveCamera = true; this.type = 'PerspectiveCamera'; /** * The vertical field of view, from bottom to top of view, * in degrees. * * @type {number} * @default 50 */ this.fov = fov; /** * The zoom factor of the camera. * * @type {number} * @default 1 */ this.zoom = 1; /** * The camera's near plane. The valid range is greater than `0` * and less than the current value of {@link PerspectiveCamera#far}. * * Note that, unlike for the {@link OrthographicCamera}, `0` is not a * valid value for a perspective camera's near plane. * * @type {number} * @default 0.1 */ this.near = near; /** * The camera's far plane. Must be greater than the * current value of {@link PerspectiveCamera#near}. * * @type {number} * @default 2000 */ this.far = far; /** * Object distance used for stereoscopy and depth-of-field effects. This * parameter does not influence the projection matrix unless a * {@link StereoCamera} is being used. * * @type {number} * @default 10 */ this.focus = 10; /** * The aspect ratio, usually the canvas width / canvas height. * * @type {number} * @default 1 */ this.aspect = aspect; /** * Represents the frustum window specification. This property should not be edited * directly but via {@link PerspectiveCamera#setViewOffset} and {@link PerspectiveCamera#clearViewOffset}. * * @type {?Object} * @default null */ this.view = null; /** * Film size used for the larger axis. Default is `35` (millimeters). This * parameter does not influence the projection matrix unless {@link PerspectiveCamera#filmOffset} * is set to a nonzero value. * * @type {number} * @default 35 */ this.filmGauge = 35; /** * Horizontal off-center offset in the same unit as {@link PerspectiveCamera#filmGauge}. 
* * @type {number} * @default 0 */ this.filmOffset = 0; this.updateProjectionMatrix(); } copy( source, recursive ) { super.copy( source, recursive ); this.fov = source.fov; this.zoom = source.zoom; this.near = source.near; this.far = source.far; this.focus = source.focus; this.aspect = source.aspect; this.view = source.view === null ? null : Object.assign( {}, source.view ); this.filmGauge = source.filmGauge; this.filmOffset = source.filmOffset; return this; } /** * Sets the FOV by focal length in respect to the current {@link PerspectiveCamera#filmGauge}. * * The default film gauge is 35, so that the focal length can be specified for * a 35mm (full frame) camera. * * @param {number} focalLength - Values for focal length and film gauge must have the same unit. */ setFocalLength( focalLength ) { /** see {@link http://www.bobatkins.com/photography/technical/field_of_view.html} */ const vExtentSlope = 0.5 * this.getFilmHeight() / focalLength; this.fov = RAD2DEG * 2 * Math.atan( vExtentSlope ); this.updateProjectionMatrix(); } /** * Returns the focal length from the current {@link PerspectiveCamera#fov} and * {@link PerspectiveCamera#filmGauge}. * * @return {number} The computed focal length. */ getFocalLength() { const vExtentSlope = Math.tan( DEG2RAD * 0.5 * this.fov ); return 0.5 * this.getFilmHeight() / vExtentSlope; } /** * Returns the current vertical field of view angle in degrees considering {@link PerspectiveCamera#zoom}. * * @return {number} The effective FOV. */ getEffectiveFOV() { return RAD2DEG * 2 * Math.atan( Math.tan( DEG2RAD * 0.5 * this.fov ) / this.zoom ); } /** * Returns the width of the image on the film. If {@link PerspectiveCamera#aspect} is greater than or * equal to one (landscape format), the result equals {@link PerspectiveCamera#filmGauge}. * * @return {number} The film width. */ getFilmWidth() { // film not completely covered in portrait format (aspect < 1) return this.filmGauge * Math.min( this.aspect, 1 ); } /** * Returns the height of the image on the film. If {@link PerspectiveCamera#aspect} is less than or * equal to one (portrait format), the result equals {@link PerspectiveCamera#filmGauge}. * * @return {number} The film height. */ getFilmHeight() { // film not completely covered in landscape format (aspect > 1) return this.filmGauge / Math.max( this.aspect, 1 ); } /** * Computes the 2D bounds of the camera's viewable rectangle at a given distance along the viewing direction. * Sets `minTarget` and `maxTarget` to the coordinates of the lower-left and upper-right corners of the view rectangle. * * @param {number} distance - The viewing distance. * @param {Vector2} minTarget - The lower-left corner of the view rectangle is written into this vector. * @param {Vector2} maxTarget - The upper-right corner of the view rectangle is written into this vector. */ getViewBounds( distance, minTarget, maxTarget ) { _v3$1.set( -1, -1, 0.5 ).applyMatrix4( this.projectionMatrixInverse ); minTarget.set( _v3$1.x, _v3$1.y ).multiplyScalar( - distance / _v3$1.z ); _v3$1.set( 1, 1, 0.5 ).applyMatrix4( this.projectionMatrixInverse ); maxTarget.set( _v3$1.x, _v3$1.y ).multiplyScalar( - distance / _v3$1.z ); } /** * Computes the width and height of the camera's viewable rectangle at a given distance along the viewing direction. * * @param {number} distance - The viewing distance. * @param {Vector2} target - The target vector that is used to store result where x is width and y is height. * @returns {Vector2} The view size.
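*
* A hypothetical use of the two helpers above: sizing a plane so that it exactly fills the view at a chosen
* distance in front of the camera. The `material` is assumed to exist elsewhere:
* ```js
* const size = new THREE.Vector2();
* camera.getViewSize( 5, size ); // visible width/height 5 units in front of the camera
*
* const plane = new THREE.Mesh( new THREE.PlaneGeometry( size.x, size.y ), material );
* plane.position.z = - 5; // camera space: the camera looks down its negative z-axis
* camera.add( plane );
* ```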
*/ getViewSize( distance, target ) { this.getViewBounds( distance, _minTarget, _maxTarget ); return target.subVectors( _maxTarget, _minTarget ); } /** * Sets an offset in a larger frustum. This is useful for multi-window or * multi-monitor/multi-machine setups. * * For example, if you have 3x2 monitors and each monitor is 1920x1080 and * the monitors are in grid like this *``` * +---+---+---+ * | A | B | C | * +---+---+---+ * | D | E | F | * +---+---+---+ *``` * then for each monitor you would call it like this: *```js * const w = 1920; * const h = 1080; * const fullWidth = w * 3; * const fullHeight = h * 2; * * // --A-- * camera.setViewOffset( fullWidth, fullHeight, w * 0, h * 0, w, h ); * // --B-- * camera.setViewOffset( fullWidth, fullHeight, w * 1, h * 0, w, h ); * // --C-- * camera.setViewOffset( fullWidth, fullHeight, w * 2, h * 0, w, h ); * // --D-- * camera.setViewOffset( fullWidth, fullHeight, w * 0, h * 1, w, h ); * // --E-- * camera.setViewOffset( fullWidth, fullHeight, w * 1, h * 1, w, h ); * // --F-- * camera.setViewOffset( fullWidth, fullHeight, w * 2, h * 1, w, h ); * ``` * * Note there is no reason monitors have to be the same size or in a grid. * * @param {number} fullWidth - The full width of multiview setup. * @param {number} fullHeight - The full height of multiview setup. * @param {number} x - The horizontal offset of the subcamera. * @param {number} y - The vertical offset of the subcamera. * @param {number} width - The width of subcamera. * @param {number} height - The height of subcamera. */ setViewOffset( fullWidth, fullHeight, x, y, width, height ) { this.aspect = fullWidth / fullHeight; if ( this.view === null ) { this.view = { enabled: true, fullWidth: 1, fullHeight: 1, offsetX: 0, offsetY: 0, width: 1, height: 1 }; } this.view.enabled = true; this.view.fullWidth = fullWidth; this.view.fullHeight = fullHeight; this.view.offsetX = x; this.view.offsetY = y; this.view.width = width; this.view.height = height; this.updateProjectionMatrix(); } /** * Removes the view offset from the projection matrix. */ clearViewOffset() { if ( this.view !== null ) { this.view.enabled = false; } this.updateProjectionMatrix(); } /** * Updates the camera's projection matrix. Must be called after any change of * camera properties. 
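*
* A typical trigger is a resize handler; the `renderer` here is an assumption, not part of this class:
* ```js
* window.addEventListener( 'resize', () => {
*
*     camera.aspect = window.innerWidth / window.innerHeight;
*     camera.updateProjectionMatrix();
*
*     renderer.setSize( window.innerWidth, window.innerHeight );
*
* } );
* ```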
*/ updateProjectionMatrix() { const near = this.near; let top = near * Math.tan( DEG2RAD * 0.5 * this.fov ) / this.zoom; let height = 2 * top; let width = this.aspect * height; let left = -0.5 * width; const view = this.view; if ( this.view !== null && this.view.enabled ) { const fullWidth = view.fullWidth, fullHeight = view.fullHeight; left += view.offsetX * width / fullWidth; top -= view.offsetY * height / fullHeight; width *= view.width / fullWidth; height *= view.height / fullHeight; } const skew = this.filmOffset; if ( skew !== 0 ) left += near * skew / this.getFilmWidth(); this.projectionMatrix.makePerspective( left, left + width, top, top - height, near, this.far, this.coordinateSystem ); this.projectionMatrixInverse.copy( this.projectionMatrix ).invert(); } toJSON( meta ) { const data = super.toJSON( meta ); data.object.fov = this.fov; data.object.zoom = this.zoom; data.object.near = this.near; data.object.far = this.far; data.object.focus = this.focus; data.object.aspect = this.aspect; if ( this.view !== null ) data.object.view = Object.assign( {}, this.view ); data.object.filmGauge = this.filmGauge; data.object.filmOffset = this.filmOffset; return data; } } const fov = -90; // negative fov is not an error const aspect = 1; /** * A special type of camera that is positioned in 3D space to render its surroundings into a * cube render target. The render target can then be used as an environment map for rendering * realtime reflections in your scene. * * ```js * // Create cube render target * const cubeRenderTarget = new THREE.WebGLCubeRenderTarget( 256, { generateMipmaps: true, minFilter: THREE.LinearMipmapLinearFilter } ); * * // Create cube camera * const cubeCamera = new THREE.CubeCamera( 1, 100000, cubeRenderTarget ); * scene.add( cubeCamera ); * * // Create car * const chromeMaterial = new THREE.MeshLambertMaterial( { color: 0xffffff, envMap: cubeRenderTarget.texture } ); * const car = new THREE.Mesh( carGeometry, chromeMaterial ); * scene.add( car ); * * // Update the render target cube * car.visible = false; * cubeCamera.position.copy( car.position ); * cubeCamera.update( renderer, scene ); * * // Render the scene * car.visible = true; * renderer.render( scene, camera ); * ``` * * @augments Object3D */ class CubeCamera extends Object3D { /** * Constructs a new cube camera. * * @param {number} near - The camera's near plane. * @param {number} far - The camera's far plane. * @param {WebGLCubeRenderTarget} renderTarget - The cube render target. */ constructor( near, far, renderTarget ) { super(); this.type = 'CubeCamera'; /** * A reference to the cube render target. * * @type {WebGLCubeRenderTarget} */ this.renderTarget = renderTarget; /** * The current active coordinate system. 
* * @type {?(WebGLCoordinateSystem|WebGPUCoordinateSystem)} * @default null */ this.coordinateSystem = null; /** * The current active mipmap level * * @type {number} * @default 0 */ this.activeMipmapLevel = 0; const cameraPX = new PerspectiveCamera( fov, aspect, near, far ); cameraPX.layers = this.layers; this.add( cameraPX ); const cameraNX = new PerspectiveCamera( fov, aspect, near, far ); cameraNX.layers = this.layers; this.add( cameraNX ); const cameraPY = new PerspectiveCamera( fov, aspect, near, far ); cameraPY.layers = this.layers; this.add( cameraPY ); const cameraNY = new PerspectiveCamera( fov, aspect, near, far ); cameraNY.layers = this.layers; this.add( cameraNY ); const cameraPZ = new PerspectiveCamera( fov, aspect, near, far ); cameraPZ.layers = this.layers; this.add( cameraPZ ); const cameraNZ = new PerspectiveCamera( fov, aspect, near, far ); cameraNZ.layers = this.layers; this.add( cameraNZ ); } /** * Must be called when the coordinate system of the cube camera is changed. */ updateCoordinateSystem() { const coordinateSystem = this.coordinateSystem; const cameras = this.children.concat(); const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = cameras; for ( const camera of cameras ) this.remove( camera ); if ( coordinateSystem === WebGLCoordinateSystem ) { cameraPX.up.set( 0, 1, 0 ); cameraPX.lookAt( 1, 0, 0 ); cameraNX.up.set( 0, 1, 0 ); cameraNX.lookAt( -1, 0, 0 ); cameraPY.up.set( 0, 0, -1 ); cameraPY.lookAt( 0, 1, 0 ); cameraNY.up.set( 0, 0, 1 ); cameraNY.lookAt( 0, -1, 0 ); cameraPZ.up.set( 0, 1, 0 ); cameraPZ.lookAt( 0, 0, 1 ); cameraNZ.up.set( 0, 1, 0 ); cameraNZ.lookAt( 0, 0, -1 ); } else if ( coordinateSystem === WebGPUCoordinateSystem ) { cameraPX.up.set( 0, -1, 0 ); cameraPX.lookAt( -1, 0, 0 ); cameraNX.up.set( 0, -1, 0 ); cameraNX.lookAt( 1, 0, 0 ); cameraPY.up.set( 0, 0, 1 ); cameraPY.lookAt( 0, 1, 0 ); cameraNY.up.set( 0, 0, -1 ); cameraNY.lookAt( 0, -1, 0 ); cameraPZ.up.set( 0, -1, 0 ); cameraPZ.lookAt( 0, 0, 1 ); cameraNZ.up.set( 0, -1, 0 ); cameraNZ.lookAt( 0, 0, -1 ); } else { throw new Error( 'THREE.CubeCamera.updateCoordinateSystem(): Invalid coordinate system: ' + coordinateSystem ); } for ( const camera of cameras ) { this.add( camera ); camera.updateMatrixWorld(); } } /** * Calling this method will render the given scene with the given renderer * into the cube render target of the camera. * * @param {(Renderer|WebGLRenderer)} renderer - The renderer. * @param {Scene} scene - The scene to render. 
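*
* A sketch of typical per-frame usage; `cubeCamera`, `reflectiveMesh`, `renderer`, `scene` and `camera`
* are assumed names from a setup like the class example above:
* ```js
* renderer.setAnimationLoop( () => {
*
*     reflectiveMesh.visible = false; // hide the object that consumes the environment map
*     cubeCamera.position.copy( reflectiveMesh.position );
*     cubeCamera.update( renderer, scene );
*     reflectiveMesh.visible = true;
*
*     renderer.render( scene, camera );
*
* } );
* ```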
*/ update( renderer, scene ) { if ( this.parent === null ) this.updateMatrixWorld(); const { renderTarget, activeMipmapLevel } = this; if ( this.coordinateSystem !== renderer.coordinateSystem ) { this.coordinateSystem = renderer.coordinateSystem; this.updateCoordinateSystem(); } const [ cameraPX, cameraNX, cameraPY, cameraNY, cameraPZ, cameraNZ ] = this.children; const currentRenderTarget = renderer.getRenderTarget(); const currentActiveCubeFace = renderer.getActiveCubeFace(); const currentActiveMipmapLevel = renderer.getActiveMipmapLevel(); const currentXrEnabled = renderer.xr.enabled; renderer.xr.enabled = false; const generateMipmaps = renderTarget.texture.generateMipmaps; renderTarget.texture.generateMipmaps = false; renderer.setRenderTarget( renderTarget, 0, activeMipmapLevel ); renderer.render( scene, cameraPX ); renderer.setRenderTarget( renderTarget, 1, activeMipmapLevel ); renderer.render( scene, cameraNX ); renderer.setRenderTarget( renderTarget, 2, activeMipmapLevel ); renderer.render( scene, cameraPY ); renderer.setRenderTarget( renderTarget, 3, activeMipmapLevel ); renderer.render( scene, cameraNY ); renderer.setRenderTarget( renderTarget, 4, activeMipmapLevel ); renderer.render( scene, cameraPZ ); // mipmaps are generated during the last call of render() // at this point, all sides of the cube render target are defined renderTarget.texture.generateMipmaps = generateMipmaps; renderer.setRenderTarget( renderTarget, 5, activeMipmapLevel ); renderer.render( scene, cameraNZ ); renderer.setRenderTarget( currentRenderTarget, currentActiveCubeFace, currentActiveMipmapLevel ); renderer.xr.enabled = currentXrEnabled; renderTarget.texture.needsPMREMUpdate = true; } } /** * Creates a cube texture made up of six images. * * ```js * const loader = new THREE.CubeTextureLoader(); * loader.setPath( 'textures/cube/pisa/' ); * * const textureCube = loader.load( [ * 'px.png', 'nx.png', 'py.png', 'ny.png', 'pz.png', 'nz.png' * ] ); * * const material = new THREE.MeshBasicMaterial( { color: 0xffffff, envMap: textureCube } ); * ``` * * @augments Texture */ class CubeTexture extends Texture { /** * Constructs a new cube texture. * * @param {Array} [images=[]] - An array holding an image for each side of a cube. * @param {number} [mapping=CubeReflectionMapping] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. * @param {string} [colorSpace=NoColorSpace] - The color space value. */ constructor( images = [], mapping = CubeReflectionMapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ) { super( images, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCubeTexture = true; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.flipY = false; } /** * Alias for {@link CubeTexture#image}.
* * @type {Array} */ get images() { return this.image; } set images( value ) { this.image = value; } } /** * A cube render target used in context of {@link WebGLRenderer}. * * @augments WebGLRenderTarget */ class WebGLCubeRenderTarget extends WebGLRenderTarget { /** * Constructs a new cube render target. * * @param {number} [size=1] - The size of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( size = 1, options = {} ) { super( size, size, options ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isWebGLCubeRenderTarget = true; const image = { width: size, height: size, depth: 1 }; const images = [ image, image, image, image, image, image ]; /** * Overwritten with a different texture type. * * @type {CubeTexture} */ this.texture = new CubeTexture( images, options.mapping, options.wrapS, options.wrapT, options.magFilter, options.minFilter, options.format, options.type, options.anisotropy, options.colorSpace ); // By convention -- likely based on the RenderMan spec from the 1990's -- cube maps are specified by WebGL (and three.js) // in a coordinate system in which positive-x is to the right when looking up the positive-z axis -- in other words, // in a left-handed coordinate system. By continuing this convention, preexisting cube maps continued to render correctly. // three.js uses a right-handed coordinate system. So environment maps used in three.js appear to have px and nx swapped // and the flag isRenderTargetTexture controls this conversion. The flip is not required when using WebGLCubeRenderTarget.texture // as a cube texture (this is detected when isRenderTargetTexture is set to true for cube textures). this.texture.isRenderTargetTexture = true; this.texture.generateMipmaps = options.generateMipmaps !== undefined ? options.generateMipmaps : false; this.texture.minFilter = options.minFilter !== undefined ? options.minFilter : LinearFilter; } /** * Converts the given equirectangular texture to a cube map. * * @param {WebGLRenderer} renderer - The renderer. * @param {Texture} texture - The equirectangular texture. * @return {WebGLCubeRenderTarget} A reference to this cube render target.
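*
* A minimal sketch; the texture path and target size are illustrative, `renderer` and `scene` are assumed:
* ```js
* new THREE.TextureLoader().load( 'textures/equirectangular.png', ( texture ) => {
*
*     texture.colorSpace = THREE.SRGBColorSpace;
*
*     const cubeRenderTarget = new THREE.WebGLCubeRenderTarget( 512 ).fromEquirectangularTexture( renderer, texture );
*     scene.background = cubeRenderTarget.texture;
*
* } );
* ```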
*/ fromEquirectangularTexture( renderer, texture ) { this.texture.type = texture.type; this.texture.colorSpace = texture.colorSpace; this.texture.generateMipmaps = texture.generateMipmaps; this.texture.minFilter = texture.minFilter; this.texture.magFilter = texture.magFilter; const shader = { uniforms: { tEquirect: { value: null }, }, vertexShader: /* glsl */`
varying vec3 vWorldDirection;
vec3 transformDirection( in vec3 dir, in mat4 matrix ) {
	return normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );
}
void main() {
	vWorldDirection = transformDirection( position, modelMatrix );
	#include <begin_vertex>
	#include <project_vertex>
}
`, fragmentShader: /* glsl */`
uniform sampler2D tEquirect;
varying vec3 vWorldDirection;
#include <common>
void main() {
	vec3 direction = normalize( vWorldDirection );
	vec2 sampleUV = equirectUv( direction );
	gl_FragColor = texture2D( tEquirect, sampleUV );
}
` }; const geometry = new BoxGeometry( 5, 5, 5 ); const material = new ShaderMaterial( { name: 'CubemapFromEquirect', uniforms: cloneUniforms( shader.uniforms ), vertexShader: shader.vertexShader, fragmentShader: shader.fragmentShader, side: BackSide, blending: NoBlending } ); material.uniforms.tEquirect.value = texture; const mesh = new Mesh( geometry, material ); const currentMinFilter = texture.minFilter; // Avoid blurred poles if ( texture.minFilter === LinearMipmapLinearFilter ) texture.minFilter = LinearFilter; const camera = new CubeCamera( 1, 10, this ); camera.update( renderer, mesh ); texture.minFilter = currentMinFilter; mesh.geometry.dispose(); mesh.material.dispose(); return this; } /** * Clears this cube render target. * * @param {WebGLRenderer} renderer - The renderer. * @param {boolean} [color=true] - Whether the color buffer should be cleared or not. * @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not. * @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not. */ clear( renderer, color = true, depth = true, stencil = true ) { const currentRenderTarget = renderer.getRenderTarget(); for ( let i = 0; i < 6; i ++ ) { renderer.setRenderTarget( this, i ); renderer.clear( color, depth, stencil ); } renderer.setRenderTarget( currentRenderTarget ); } } /** * This is almost identical to an {@link Object3D}. Its purpose is to * make working with groups of objects syntactically clearer. * * ```js * // Create a group and add the two cubes. * // These cubes can now be rotated / scaled etc as a group. * const group = new THREE.Group(); * * group.add( meshA ); * group.add( meshB ); * * scene.add( group ); * ``` * * @augments Object3D */ class Group extends Object3D { constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isGroup = true; this.type = 'Group'; } } const _moveEvent = { type: 'move' }; /** * Class for representing an XR controller with its * different coordinate systems. * * @private */ class WebXRController { /** * Constructs a new XR controller. */ constructor() { /** * A group representing the target ray space * of the XR controller. * * @private * @type {?Group} * @default null */ this._targetRay = null; /** * A group representing the grip space * of the XR controller. * * @private * @type {?Group} * @default null */ this._grip = null; /** * A group representing the hand space * of the XR controller. * * @private * @type {?Group} * @default null */ this._hand = null; } /** * Returns a group representing the hand space of the XR controller.
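*
* Application code usually does not construct this class directly; the three spaces surface through the
* renderer's XR manager. A hedged sketch of the public entry points, assuming a `renderer` and `scene`:
* ```js
* const hand = renderer.xr.getHand( 0 ); // Group backed by getHandSpace()
* const controller = renderer.xr.getController( 0 ); // Group backed by getTargetRaySpace()
* const grip = renderer.xr.getControllerGrip( 0 ); // Group backed by getGripSpace()
*
* scene.add( hand, controller, grip );
* ```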
* * @return {Group} A group representing the hand space of the XR controller. */ getHandSpace() { if ( this._hand === null ) { this._hand = new Group(); this._hand.matrixAutoUpdate = false; this._hand.visible = false; this._hand.joints = {}; this._hand.inputState = { pinching: false }; } return this._hand; } /** * Returns a group representing the target ray space of the XR controller. * * @return {Group} A group representing the target ray space of the XR controller. */ getTargetRaySpace() { if ( this._targetRay === null ) { this._targetRay = new Group(); this._targetRay.matrixAutoUpdate = false; this._targetRay.visible = false; this._targetRay.hasLinearVelocity = false; this._targetRay.linearVelocity = new Vector3(); this._targetRay.hasAngularVelocity = false; this._targetRay.angularVelocity = new Vector3(); } return this._targetRay; } /** * Returns a group representing the grip space of the XR controller. * * @return {Group} A group representing the grip space of the XR controller. */ getGripSpace() { if ( this._grip === null ) { this._grip = new Group(); this._grip.matrixAutoUpdate = false; this._grip.visible = false; this._grip.hasLinearVelocity = false; this._grip.linearVelocity = new Vector3(); this._grip.hasAngularVelocity = false; this._grip.angularVelocity = new Vector3(); } return this._grip; } /** * Dispatches the given event to the groups representing * the different coordinate spaces of the XR controller. * * @param {Object} event - The event to dispatch. * @return {WebXRController} A reference to this instance. */ dispatchEvent( event ) { if ( this._targetRay !== null ) { this._targetRay.dispatchEvent( event ); } if ( this._grip !== null ) { this._grip.dispatchEvent( event ); } if ( this._hand !== null ) { this._hand.dispatchEvent( event ); } return this; } /** * Connects the controller with the given XR input source. * * @param {XRInputSource} inputSource - The input source. * @return {WebXRController} A reference to this instance. */ connect( inputSource ) { if ( inputSource && inputSource.hand ) { const hand = this._hand; if ( hand ) { for ( const inputjoint of inputSource.hand.values() ) { // Initialize hand with joints when connected this._getHandJoint( hand, inputjoint ); } } } this.dispatchEvent( { type: 'connected', data: inputSource } ); return this; } /** * Disconnects the controller from the given XR input source. * * @param {XRInputSource} inputSource - The input source. * @return {WebXRController} A reference to this instance. */ disconnect( inputSource ) { this.dispatchEvent( { type: 'disconnected', data: inputSource } ); if ( this._targetRay !== null ) { this._targetRay.visible = false; } if ( this._grip !== null ) { this._grip.visible = false; } if ( this._hand !== null ) { this._hand.visible = false; } return this; } /** * Updates the controller with the given input source, XR frame and reference space. * This updates the transformations of the groups that represent the different * coordinate systems of the controller. * * @param {XRInputSource} inputSource - The input source. * @param {XRFrame} frame - The XR frame. * @param {XRReferenceSpace} referenceSpace - The reference space. * @return {WebXRController} A reference to this instance. 
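*
* The `pinchstart` / `pinchend` events dispatched below can be observed on the hand space group; a small
* illustrative listener, assuming a `renderer` with XR enabled:
* ```js
* const hand = renderer.xr.getHand( 0 );
*
* hand.addEventListener( 'pinchstart', () => console.log( 'pinch started' ) );
* hand.addEventListener( 'pinchend', () => console.log( 'pinch ended' ) );
* ```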
*/ update( inputSource, frame, referenceSpace ) { let inputPose = null; let gripPose = null; let handPose = null; const targetRay = this._targetRay; const grip = this._grip; const hand = this._hand; if ( inputSource && frame.session.visibilityState !== 'visible-blurred' ) { if ( hand && inputSource.hand ) { handPose = true; for ( const inputjoint of inputSource.hand.values() ) { // Update the joints groups with the XRJoint poses const jointPose = frame.getJointPose( inputjoint, referenceSpace ); // The transform of this joint will be updated with the joint pose on each frame const joint = this._getHandJoint( hand, inputjoint ); if ( jointPose !== null ) { joint.matrix.fromArray( jointPose.transform.matrix ); joint.matrix.decompose( joint.position, joint.rotation, joint.scale ); joint.matrixWorldNeedsUpdate = true; joint.jointRadius = jointPose.radius; } joint.visible = jointPose !== null; } // Custom events // Check pinch const indexTip = hand.joints[ 'index-finger-tip' ]; const thumbTip = hand.joints[ 'thumb-tip' ]; const distance = indexTip.position.distanceTo( thumbTip.position ); const distanceToPinch = 0.02; const threshold = 0.005; if ( hand.inputState.pinching && distance > distanceToPinch + threshold ) { hand.inputState.pinching = false; this.dispatchEvent( { type: 'pinchend', handedness: inputSource.handedness, target: this } ); } else if ( ! hand.inputState.pinching && distance <= distanceToPinch - threshold ) { hand.inputState.pinching = true; this.dispatchEvent( { type: 'pinchstart', handedness: inputSource.handedness, target: this } ); } } else { if ( grip !== null && inputSource.gripSpace ) { gripPose = frame.getPose( inputSource.gripSpace, referenceSpace ); if ( gripPose !== null ) { grip.matrix.fromArray( gripPose.transform.matrix ); grip.matrix.decompose( grip.position, grip.rotation, grip.scale ); grip.matrixWorldNeedsUpdate = true; if ( gripPose.linearVelocity ) { grip.hasLinearVelocity = true; grip.linearVelocity.copy( gripPose.linearVelocity ); } else { grip.hasLinearVelocity = false; } if ( gripPose.angularVelocity ) { grip.hasAngularVelocity = true; grip.angularVelocity.copy( gripPose.angularVelocity ); } else { grip.hasAngularVelocity = false; } } } } if ( targetRay !== null ) { inputPose = frame.getPose( inputSource.targetRaySpace, referenceSpace ); // Some runtimes (namely Vive Cosmos with Vive OpenXR Runtime) have only grip space and ray space is equal to it if ( inputPose === null && gripPose !== null ) { inputPose = gripPose; } if ( inputPose !== null ) { targetRay.matrix.fromArray( inputPose.transform.matrix ); targetRay.matrix.decompose( targetRay.position, targetRay.rotation, targetRay.scale ); targetRay.matrixWorldNeedsUpdate = true; if ( inputPose.linearVelocity ) { targetRay.hasLinearVelocity = true; targetRay.linearVelocity.copy( inputPose.linearVelocity ); } else { targetRay.hasLinearVelocity = false; } if ( inputPose.angularVelocity ) { targetRay.hasAngularVelocity = true; targetRay.angularVelocity.copy( inputPose.angularVelocity ); } else { targetRay.hasAngularVelocity = false; } this.dispatchEvent( _moveEvent ); } } } if ( targetRay !== null ) { targetRay.visible = ( inputPose !== null ); } if ( grip !== null ) { grip.visible = ( gripPose !== null ); } if ( hand !== null ) { hand.visible = ( handPose !== null ); } return this; } /** * Returns a group representing the hand joint for the given input joint. * * @private * @param {Group} hand - The group representing the hand space. * @param {XRHandJoint} inputjoint - The XR input joint.
* @return {Group} A group representing the hand joint for the given input joint. */ _getHandJoint( hand, inputjoint ) { if ( hand.joints[ inputjoint.jointName ] === undefined ) { const joint = new Group(); joint.matrixAutoUpdate = false; joint.visible = false; hand.joints[ inputjoint.jointName ] = joint; hand.add( joint ); } return hand.joints[ inputjoint.jointName ]; } } /** * This class can be used to define an exponential squared fog, * which gives a clear view near the camera and a faster than exponentially * densening fog farther from the camera. * * ```js * const scene = new THREE.Scene(); * scene.fog = new THREE.FogExp2( 0xcccccc, 0.002 ); * ``` */ class FogExp2 { /** * Constructs a new fog. * * @param {number|Color} color - The fog's color. * @param {number} [density=0.00025] - Defines how fast the fog will grow dense. */ constructor( color, density = 0.00025 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isFogExp2 = true; /** * The name of the fog. * * @type {string} */ this.name = ''; /** * The fog's color. * * @type {Color} */ this.color = new Color( color ); /** * Defines how fast the fog will grow dense. * * @type {number} * @default 0.00025 */ this.density = density; } /** * Returns a new fog with copied values from this instance. * * @return {FogExp2} A clone of this instance. */ clone() { return new FogExp2( this.color, this.density ); } /** * Serializes the fog into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized fog */ toJSON( /* meta */ ) { return { type: 'FogExp2', name: this.name, color: this.color.getHex(), density: this.density }; } } /** * This class can be used to define a linear fog that grows linearly denser * with the distance. * * ```js * const scene = new THREE.Scene(); * scene.fog = new THREE.Fog( 0xcccccc, 10, 15 ); * ``` */ class Fog { /** * Constructs a new fog. * * @param {number|Color} color - The fog's color. * @param {number} [near=1] - The minimum distance to start applying fog. * @param {number} [far=1000] - The maximum distance at which fog stops being calculated and applied. */ constructor( color, near = 1, far = 1000 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isFog = true; /** * The name of the fog. * * @type {string} */ this.name = ''; /** * The fog's color. * * @type {Color} */ this.color = new Color( color ); /** * The minimum distance to start applying fog. Objects that are less than * `near` units from the active camera won't be affected by fog. * * @type {number} * @default 1 */ this.near = near; /** * The maximum distance at which fog stops being calculated and applied. * Objects that are more than `far` units away from the active camera won't * be affected by fog. * * @type {number} * @default 1000 */ this.far = far; } /** * Returns a new fog with copied values from this instance. * * @return {Fog} A clone of this instance. */ clone() { return new Fog( this.color, this.near, this.far ); } /** * Serializes the fog into JSON. * * @param {?(Object|string)} meta - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized fog */ toJSON( /* meta */ ) { return { type: 'Fog', name: this.name, color: this.color.getHex(), near: this.near, far: this.far }; } } /** * Scenes allow you to set up what is to be rendered and where by three.js. 
* This is where you place 3D objects like meshes, lines or lights. * * @augments Object3D */ class Scene extends Object3D { /** * Constructs a new scene. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isScene = true; this.type = 'Scene'; /** * Defines the background of the scene. Valid inputs are: * * - A color for defining a uniform colored background. * - A texture for defining a (flat) textured background. * - Cube textures or equirectangular textures for defining a skybox. * * @type {?(Color|Texture)} * @default null */ this.background = null; /** * Sets the environment map for all physical materials in the scene. However, * it's not possible to overwrite an existing texture assigned to the `envMap` * material property. * * @type {?Texture} * @default null */ this.environment = null; /** * A fog instance defining the type of fog that affects everything * rendered in the scene. * * @type {?(Fog|FogExp2)} * @default null */ this.fog = null; /** * Sets the blurriness of the background. Only influences environment maps * assigned to {@link Scene#background}. Valid input is a float between `0` * and `1`. * * @type {number} * @default 0 */ this.backgroundBlurriness = 0; /** * Attenuates the color of the background. Only applies to background textures. * * @type {number} * @default 1 */ this.backgroundIntensity = 1; /** * The rotation of the background in radians. Only influences environment maps * assigned to {@link Scene#background}. * * @type {Euler} * @default (0,0,0) */ this.backgroundRotation = new Euler(); /** * Attenuates the color of the environment. Only influences environment maps * assigned to {@link Scene#environment}. * * @type {number} * @default 1 */ this.environmentIntensity = 1; /** * The rotation of the environment map in radians. Only influences physical materials * in the scene when {@link Scene#environment} is used. * * @type {Euler} * @default (0,0,0) */ this.environmentRotation = new Euler(); /** * Forces everything in the scene to be rendered with the defined material. It is possible * to exclude materials from override by setting {@link Material#allowOverride} to `false`. 
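*
* A common illustrative use is a temporary single-material pass, for example rendering depth for the whole
* scene; the `renderer` and `camera` are assumed:
* ```js
* scene.overrideMaterial = new THREE.MeshDepthMaterial();
* renderer.render( scene, camera ); // every visible object renders with the depth material
* scene.overrideMaterial = null; // restore the per-object materials
* ```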
* * @type {?Material} * @default null */ this.overrideMaterial = null; if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) { __THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) ); } } copy( source, recursive ) { super.copy( source, recursive ); if ( source.background !== null ) this.background = source.background.clone(); if ( source.environment !== null ) this.environment = source.environment.clone(); if ( source.fog !== null ) this.fog = source.fog.clone(); this.backgroundBlurriness = source.backgroundBlurriness; this.backgroundIntensity = source.backgroundIntensity; this.backgroundRotation.copy( source.backgroundRotation ); this.environmentIntensity = source.environmentIntensity; this.environmentRotation.copy( source.environmentRotation ); if ( source.overrideMaterial !== null ) this.overrideMaterial = source.overrideMaterial.clone(); this.matrixAutoUpdate = source.matrixAutoUpdate; return this; } toJSON( meta ) { const data = super.toJSON( meta ); if ( this.fog !== null ) data.object.fog = this.fog.toJSON(); if ( this.backgroundBlurriness > 0 ) data.object.backgroundBlurriness = this.backgroundBlurriness; if ( this.backgroundIntensity !== 1 ) data.object.backgroundIntensity = this.backgroundIntensity; data.object.backgroundRotation = this.backgroundRotation.toArray(); if ( this.environmentIntensity !== 1 ) data.object.environmentIntensity = this.environmentIntensity; data.object.environmentRotation = this.environmentRotation.toArray(); return data; } } /** * "Interleaved" means that multiple attributes, possibly of different types, * (e.g., position, normal, uv, color) are packed into a single array buffer. * * An introduction into interleaved arrays can be found here: [Interleaved array basics]{@link https://blog.tojicode.com/2011/05/interleaved-array-basics.html} */ class InterleavedBuffer { /** * Constructs a new interleaved buffer. * * @param {TypedArray} array - A typed array with a shared buffer storing attribute data. * @param {number} stride - The number of typed-array elements per vertex. */ constructor( array, stride ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInterleavedBuffer = true; /** * A typed array with a shared buffer storing attribute data. * * @type {TypedArray} */ this.array = array; /** * The number of typed-array elements per vertex. * * @type {number} */ this.stride = stride; /** * The total number of elements in the array * * @type {number} * @readonly */ this.count = array !== undefined ? array.length / stride : 0; /** * Defines the intended usage pattern of the data store for optimization purposes. * * Note: After the initial use of a buffer, its usage cannot be changed. Instead, * instantiate a new one and set the desired usage before the next render. * * @type {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} * @default StaticDrawUsage */ this.usage = StaticDrawUsage; /** * This can be used to only update some components of stored vectors (for example, just the * component related to color). Use the `addUpdateRange()` function to add ranges to this array. * * @type {Array} */ this.updateRanges = []; /** * A version number, incremented every time the `needsUpdate` is set to `true`. * * @type {number} */ this.version = 0; /** * The UUID of the interleaved buffer. 
* * @type {string} * @readonly */ this.uuid = generateUUID(); } /** * A callback function that is executed after the renderer has transferred the attribute array * data to the GPU. */ onUploadCallback() {} /** * Flag to indicate that this attribute has changed and should be re-sent to * the GPU. Set this to `true` when you modify the value of the array. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) this.version ++; } /** * Sets the usage of this interleaved buffer. * * @param {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} value - The usage to set. * @return {InterleavedBuffer} A reference to this interleaved buffer. */ setUsage( value ) { this.usage = value; return this; } /** * Adds a range of data in the data array to be updated on the GPU. * * @param {number} start - Position at which to start update. * @param {number} count - The number of components to update. */ addUpdateRange( start, count ) { this.updateRanges.push( { start, count } ); } /** * Clears the update ranges. */ clearUpdateRanges() { this.updateRanges.length = 0; } /** * Copies the values of the given interleaved buffer to this instance. * * @param {InterleavedBuffer} source - The interleaved buffer to copy. * @return {InterleavedBuffer} A reference to this instance. */ copy( source ) { this.array = new source.array.constructor( source.array ); this.count = source.count; this.stride = source.stride; this.usage = source.usage; return this; } /** * Copies a vector from the given interleaved buffer to this one. The start * and destination position in the attribute buffers are represented by the * given indices. * * @param {number} index1 - The destination index into this interleaved buffer. * @param {InterleavedBuffer} interleavedBuffer - The interleaved buffer to copy from. * @param {number} index2 - The source index into the given interleaved buffer. * @return {InterleavedBuffer} A reference to this instance. */ copyAt( index1, interleavedBuffer, index2 ) { index1 *= this.stride; index2 *= interleavedBuffer.stride; for ( let i = 0, l = this.stride; i < l; i ++ ) { this.array[ index1 + i ] = interleavedBuffer.array[ index2 + i ]; } return this; } /** * Sets the given array data in the interleaved buffer. * * @param {(TypedArray|Array)} value - The array data to set. * @param {number} [offset=0] - The offset in this interleaved buffer's array. * @return {InterleavedBuffer} A reference to this instance. */ set( value, offset = 0 ) { this.array.set( value, offset ); return this; } /** * Returns a new interleaved buffer with copied values from this instance. * * @param {Object} [data] - An object with shared array buffers that allows to retain shared structures. * @return {InterleavedBuffer} A clone of this instance. */ clone( data ) { if ( data.arrayBuffers === undefined ) { data.arrayBuffers = {}; } if ( this.array.buffer._uuid === undefined ) { this.array.buffer._uuid = generateUUID(); } if ( data.arrayBuffers[ this.array.buffer._uuid ] === undefined ) { data.arrayBuffers[ this.array.buffer._uuid ] = this.array.slice( 0 ).buffer; } const array = new this.array.constructor( data.arrayBuffers[ this.array.buffer._uuid ] ); const ib = new this.constructor( array, this.stride ); ib.setUsage( this.usage ); return ib; } /** * Sets the given callback function that is executed after the Renderer has transferred * the array data to the GPU.
Can be used to perform clean-up operations after * the upload when data are not needed anymore on the CPU side. * * @param {Function} callback - The `onUpload()` callback. * @return {InterleavedBuffer} A reference to this instance. */ onUpload( callback ) { this.onUploadCallback = callback; return this; } /** * Serializes the interleaved buffer into JSON. * * @param {Object} [data] - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized interleaved buffer. */ toJSON( data ) { if ( data.arrayBuffers === undefined ) { data.arrayBuffers = {}; } // generate UUID for array buffer if necessary if ( this.array.buffer._uuid === undefined ) { this.array.buffer._uuid = generateUUID(); } if ( data.arrayBuffers[ this.array.buffer._uuid ] === undefined ) { data.arrayBuffers[ this.array.buffer._uuid ] = Array.from( new Uint32Array( this.array.buffer ) ); } // return { uuid: this.uuid, buffer: this.array.buffer._uuid, type: this.array.constructor.name, stride: this.stride }; } } const _vector$7 = /*@__PURE__*/ new Vector3(); /** * An alternative version of a buffer attribute with interleaved data. Interleaved * attributes share a common interleaved data storage ({@link InterleavedBuffer}) and refer with * different offsets into the buffer. */ class InterleavedBufferAttribute { /** * Constructs a new interleaved buffer attribute. * * @param {InterleavedBuffer} interleavedBuffer - The buffer holding the interleaved data. * @param {number} itemSize - The item size. * @param {number} offset - The attribute offset into the buffer. * @param {boolean} [normalized=false] - Whether the data are normalized or not. */ constructor( interleavedBuffer, itemSize, offset, normalized = false ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInterleavedBufferAttribute = true; /** * The name of the buffer attribute. * * @type {string} */ this.name = ''; /** * The buffer holding the interleaved data. * * @type {InterleavedBuffer} */ this.data = interleavedBuffer; /** * The item size, see {@link BufferAttribute#itemSize}. * * @type {number} */ this.itemSize = itemSize; /** * The attribute offset into the buffer. * * @type {number} */ this.offset = offset; /** * Whether the data are normalized or not, see {@link BufferAttribute#normalized} * * @type {boolean} */ this.normalized = normalized; } /** * The item count of this buffer attribute. * * @type {number} * @readonly */ get count() { return this.data.count; } /** * The array holding the interleaved buffer attribute data. * * @type {TypedArray} */ get array() { return this.data.array; } /** * Flag to indicate that this attribute has changed and should be re-sent to * the GPU. Set this to `true` when you modify the value of the array. * * @type {boolean} * @default false * @param {boolean} value */ set needsUpdate( value ) { this.data.needsUpdate = value; } /** * Applies the given 4x4 matrix to the given attribute. Only works with * item size `3`. * * @param {Matrix4} m - The matrix to apply. * @return {InterleavedBufferAttribute} A reference to this instance. */ applyMatrix4( m ) { for ( let i = 0, l = this.data.count; i < l; i ++ ) { _vector$7.fromBufferAttribute( this, i ); _vector$7.applyMatrix4( m ); this.setXYZ( i, _vector$7.x, _vector$7.y, _vector$7.z ); } return this; } /** * Applies the given 3x3 normal matrix to the given attribute. Only works with * item size `3`. * * @param {Matrix3} m - The normal matrix to apply.
* @return {InterleavedBufferAttribute} A reference to this instance. */ applyNormalMatrix( m ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$7.fromBufferAttribute( this, i ); _vector$7.applyNormalMatrix( m ); this.setXYZ( i, _vector$7.x, _vector$7.y, _vector$7.z ); } return this; } /** * Applies the given 4x4 matrix to the given attribute. Only works with * item size `3` and with direction vectors. * * @param {Matrix4} m - The matrix to apply. * @return {InterleavedBufferAttribute} A reference to this instance. */ transformDirection( m ) { for ( let i = 0, l = this.count; i < l; i ++ ) { _vector$7.fromBufferAttribute( this, i ); _vector$7.transformDirection( m ); this.setXYZ( i, _vector$7.x, _vector$7.y, _vector$7.z ); } return this; } /** * Returns the given component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} component - The component index. * @return {number} The returned value. */ getComponent( index, component ) { let value = this.array[ index * this.data.stride + this.offset + component ]; if ( this.normalized ) value = denormalize( value, this.array ); return value; } /** * Sets the given value to the given component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} component - The component index. * @param {number} value - The value to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setComponent( index, component, value ) { if ( this.normalized ) value = normalize( value, this.array ); this.data.array[ index * this.data.stride + this.offset + component ] = value; return this; } /** * Sets the x component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setX( index, x ) { if ( this.normalized ) x = normalize( x, this.array ); this.data.array[ index * this.data.stride + this.offset ] = x; return this; } /** * Sets the y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} y - The value to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setY( index, y ) { if ( this.normalized ) y = normalize( y, this.array ); this.data.array[ index * this.data.stride + this.offset + 1 ] = y; return this; } /** * Sets the z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} z - The value to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setZ( index, z ) { if ( this.normalized ) z = normalize( z, this.array ); this.data.array[ index * this.data.stride + this.offset + 2 ] = z; return this; } /** * Sets the w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} w - The value to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setW( index, w ) { if ( this.normalized ) w = normalize( w, this.array ); this.data.array[ index * this.data.stride + this.offset + 3 ] = w; return this; } /** * Returns the x component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The x component. 
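*
* To make the `index * stride + offset` addressing used by these accessors concrete, a hypothetical
* two-attribute layout; the `geometry` is assumed to exist:
* ```js
* // Per vertex: x, y, z, u, v  ->  stride 5, position at offset 0, uv at offset 3.
* const data = new Float32Array( [ 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1 ] );
* const buffer = new THREE.InterleavedBuffer( data, 5 );
*
* geometry.setAttribute( 'position', new THREE.InterleavedBufferAttribute( buffer, 3, 0 ) );
* geometry.setAttribute( 'uv', new THREE.InterleavedBufferAttribute( buffer, 2, 3 ) );
*
* // getX( 1 ) on 'position' reads data[ 1 * 5 + 0 ], getX( 1 ) on 'uv' reads data[ 1 * 5 + 3 ].
* ```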
*/ getX( index ) { let x = this.data.array[ index * this.data.stride + this.offset ]; if ( this.normalized ) x = denormalize( x, this.array ); return x; } /** * Returns the y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The y component. */ getY( index ) { let y = this.data.array[ index * this.data.stride + this.offset + 1 ]; if ( this.normalized ) y = denormalize( y, this.array ); return y; } /** * Returns the z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The z component. */ getZ( index ) { let z = this.data.array[ index * this.data.stride + this.offset + 2 ]; if ( this.normalized ) z = denormalize( z, this.array ); return z; } /** * Returns the w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @return {number} The w component. */ getW( index ) { let w = this.data.array[ index * this.data.stride + this.offset + 3 ]; if ( this.normalized ) w = denormalize( w, this.array ); return w; } /** * Sets the x and y component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setXY( index, x, y ) { index = index * this.data.stride + this.offset; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); } this.data.array[ index + 0 ] = x; this.data.array[ index + 1 ] = y; return this; } /** * Sets the x, y and z component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @param {number} z - The value for the z component to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setXYZ( index, x, y, z ) { index = index * this.data.stride + this.offset; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); } this.data.array[ index + 0 ] = x; this.data.array[ index + 1 ] = y; this.data.array[ index + 2 ] = z; return this; } /** * Sets the x, y, z and w component of the vector at the given index. * * @param {number} index - The index into the buffer attribute. * @param {number} x - The value for the x component to set. * @param {number} y - The value for the y component to set. * @param {number} z - The value for the z component to set. * @param {number} w - The value for the w component to set. * @return {InterleavedBufferAttribute} A reference to this instance. */ setXYZW( index, x, y, z, w ) { index = index * this.data.stride + this.offset; if ( this.normalized ) { x = normalize( x, this.array ); y = normalize( y, this.array ); z = normalize( z, this.array ); w = normalize( w, this.array ); } this.data.array[ index + 0 ] = x; this.data.array[ index + 1 ] = y; this.data.array[ index + 2 ] = z; this.data.array[ index + 3 ] = w; return this; } /** * Returns a new buffer attribute with copied values from this instance. * * If no parameter is provided, cloning an interleaved buffer attribute will de-interleave buffer data. * * @param {Object} [data] - An object with interleaved buffers that allows to retain the interleaved property. 
* @return {BufferAttribute|InterleavedBufferAttribute} A clone of this instance. */ clone( data ) { if ( data === undefined ) { console.log( 'THREE.InterleavedBufferAttribute.clone(): Cloning an interleaved buffer attribute will de-interleave buffer data.' ); const array = []; for ( let i = 0; i < this.count; i ++ ) { const index = i * this.data.stride + this.offset; for ( let j = 0; j < this.itemSize; j ++ ) { array.push( this.data.array[ index + j ] ); } } return new BufferAttribute( new this.array.constructor( array ), this.itemSize, this.normalized ); } else { if ( data.interleavedBuffers === undefined ) { data.interleavedBuffers = {}; } if ( data.interleavedBuffers[ this.data.uuid ] === undefined ) { data.interleavedBuffers[ this.data.uuid ] = this.data.clone( data ); } return new InterleavedBufferAttribute( data.interleavedBuffers[ this.data.uuid ], this.itemSize, this.offset, this.normalized ); } } /** * Serializes the buffer attribute into JSON. * * If no parameter is provided, serializing an interleaved buffer attribute will de-interleave buffer data. * * @param {Object} [data] - An optional value holding meta information about the serialization. * @return {Object} A JSON object representing the serialized buffer attribute. */ toJSON( data ) { if ( data === undefined ) { console.log( 'THREE.InterleavedBufferAttribute.toJSON(): Serializing an interleaved buffer attribute will de-interleave buffer data.' ); const array = []; for ( let i = 0; i < this.count; i ++ ) { const index = i * this.data.stride + this.offset; for ( let j = 0; j < this.itemSize; j ++ ) { array.push( this.data.array[ index + j ] ); } } // de-interleave data and save it as an ordinary buffer attribute for now return { itemSize: this.itemSize, type: this.array.constructor.name, array: array, normalized: this.normalized }; } else { // save as true interleaved attribute if ( data.interleavedBuffers === undefined ) { data.interleavedBuffers = {}; } if ( data.interleavedBuffers[ this.data.uuid ] === undefined ) { data.interleavedBuffers[ this.data.uuid ] = this.data.toJSON( data ); } return { isInterleavedBufferAttribute: true, itemSize: this.itemSize, data: this.data.uuid, offset: this.offset, normalized: this.normalized }; } } } /** * A material for rendering instances of {@link Sprite}. * * ```js * const map = new THREE.TextureLoader().load( 'textures/sprite.png' ); * const material = new THREE.SpriteMaterial( { map: map, color: 0xffffff } ); * * const sprite = new THREE.Sprite( material ); * sprite.scale.set( 200, 200, 1 ); * scene.add( sprite ); * ``` * * @augments Material */ class SpriteMaterial extends Material { /** * Constructs a new sprite material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSpriteMaterial = true; this.type = 'SpriteMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`.
* * @type {?Texture} * @default null */ this.map = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The rotation of the sprite in radians. * * @type {number} * @default 0 */ this.rotation = 0; /** * Specifies whether size of the sprite is attenuated by the camera depth (perspective camera only). * * @type {boolean} * @default true */ this.sizeAttenuation = true; /** * Overwritten since sprite materials are transparent * by default. * * @type {boolean} * @default true */ this.transparent = true; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.alphaMap = source.alphaMap; this.rotation = source.rotation; this.sizeAttenuation = source.sizeAttenuation; this.fog = source.fog; return this; } } let _geometry; const _intersectPoint = /*@__PURE__*/ new Vector3(); const _worldScale = /*@__PURE__*/ new Vector3(); const _mvPosition = /*@__PURE__*/ new Vector3(); const _alignedPosition = /*@__PURE__*/ new Vector2(); const _rotatedPosition = /*@__PURE__*/ new Vector2(); const _viewWorldMatrix = /*@__PURE__*/ new Matrix4(); const _vA = /*@__PURE__*/ new Vector3(); const _vB = /*@__PURE__*/ new Vector3(); const _vC = /*@__PURE__*/ new Vector3(); const _uvA = /*@__PURE__*/ new Vector2(); const _uvB = /*@__PURE__*/ new Vector2(); const _uvC = /*@__PURE__*/ new Vector2(); /** * A sprite is a plane that always faces towards the camera, generally with a * partially transparent texture applied. * * Sprites do not cast shadows, setting {@link Object3D#castShadow} to `true` will * have no effect. * * ```js * const map = new THREE.TextureLoader().load( 'sprite.png' ); * const material = new THREE.SpriteMaterial( { map: map } ); * * const sprite = new THREE.Sprite( material ); * scene.add( sprite ); * ``` * * @augments Object3D */ class Sprite extends Object3D { /** * Constructs a new sprite. * * @param {SpriteMaterial} [material] - The sprite material. */ constructor( material = new SpriteMaterial() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSprite = true; this.type = 'Sprite'; if ( _geometry === undefined ) { _geometry = new BufferGeometry(); const float32Array = new Float32Array( [ -0.5, -0.5, 0, 0, 0, 0.5, -0.5, 0, 1, 0, 0.5, 0.5, 0, 1, 1, -0.5, 0.5, 0, 0, 1 ] ); const interleavedBuffer = new InterleavedBuffer( float32Array, 5 ); _geometry.setIndex( [ 0, 1, 2, 0, 2, 3 ] ); _geometry.setAttribute( 'position', new InterleavedBufferAttribute( interleavedBuffer, 3, 0, false ) ); _geometry.setAttribute( 'uv', new InterleavedBufferAttribute( interleavedBuffer, 2, 3, false ) ); } /** * The sprite geometry. * * @type {BufferGeometry} */ this.geometry = _geometry; /** * The sprite material. * * @type {SpriteMaterial} */ this.material = material; /** * The sprite's anchor point, and the point around which the sprite rotates. 
* A value of `(0.5, 0.5)` corresponds to the midpoint of the sprite. A value * of `(0, 0)` corresponds to the lower left corner of the sprite. * * @type {Vector2} * @default (0.5,0.5) */ this.center = new Vector2( 0.5, 0.5 ); } /** * Computes intersection points between a casted ray and this sprite. * * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - The target array that holds the intersection points. */ raycast( raycaster, intersects ) { if ( raycaster.camera === null ) { console.error( 'THREE.Sprite: "Raycaster.camera" needs to be set in order to raycast against sprites.' ); } _worldScale.setFromMatrixScale( this.matrixWorld ); _viewWorldMatrix.copy( raycaster.camera.matrixWorld ); this.modelViewMatrix.multiplyMatrices( raycaster.camera.matrixWorldInverse, this.matrixWorld ); _mvPosition.setFromMatrixPosition( this.modelViewMatrix ); if ( raycaster.camera.isPerspectiveCamera && this.material.sizeAttenuation === false ) { _worldScale.multiplyScalar( - _mvPosition.z ); } const rotation = this.material.rotation; let sin, cos; if ( rotation !== 0 ) { cos = Math.cos( rotation ); sin = Math.sin( rotation ); } const center = this.center; transformVertex( _vA.set( -0.5, -0.5, 0 ), _mvPosition, center, _worldScale, sin, cos ); transformVertex( _vB.set( 0.5, -0.5, 0 ), _mvPosition, center, _worldScale, sin, cos ); transformVertex( _vC.set( 0.5, 0.5, 0 ), _mvPosition, center, _worldScale, sin, cos ); _uvA.set( 0, 0 ); _uvB.set( 1, 0 ); _uvC.set( 1, 1 ); // check first triangle let intersect = raycaster.ray.intersectTriangle( _vA, _vB, _vC, false, _intersectPoint ); if ( intersect === null ) { // check second triangle transformVertex( _vB.set( -0.5, 0.5, 0 ), _mvPosition, center, _worldScale, sin, cos ); _uvB.set( 0, 1 ); intersect = raycaster.ray.intersectTriangle( _vA, _vC, _vB, false, _intersectPoint ); if ( intersect === null ) { return; } } const distance = raycaster.ray.origin.distanceTo( _intersectPoint ); if ( distance < raycaster.near || distance > raycaster.far ) return; intersects.push( { distance: distance, point: _intersectPoint.clone(), uv: Triangle.getInterpolation( _intersectPoint, _vA, _vB, _vC, _uvA, _uvB, _uvC, new Vector2() ), face: null, object: this } ); } copy( source, recursive ) { super.copy( source, recursive ); if ( source.center !== undefined ) this.center.copy( source.center ); this.material = source.material; return this; } } function transformVertex( vertexPosition, mvPosition, center, scale, sin, cos ) { // compute position in camera space _alignedPosition.subVectors( vertexPosition, center ).addScalar( 0.5 ).multiply( scale ); // to check if rotation is not zero if ( sin !== undefined ) { _rotatedPosition.x = ( cos * _alignedPosition.x ) - ( sin * _alignedPosition.y ); _rotatedPosition.y = ( sin * _alignedPosition.x ) + ( cos * _alignedPosition.y ); } else { _rotatedPosition.copy( _alignedPosition ); } vertexPosition.copy( mvPosition ); vertexPosition.x += _rotatedPosition.x; vertexPosition.y += _rotatedPosition.y; // transform to world space vertexPosition.applyMatrix4( _viewWorldMatrix ); } const _v1$2 = /*@__PURE__*/ new Vector3(); const _v2$1 = /*@__PURE__*/ new Vector3(); /** * A component for providing a basic Level of Detail (LOD) mechanism. * * Every LOD level is associated with an object, and rendering can be switched * between them at the distances specified. Typically you would create, say, * three meshes, one for far away (low detail), one for mid range (medium * detail) and one for close up (high detail). 
* * ```js * const lod = new THREE.LOD(); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * * //Create spheres with 3 levels of detail and create new LOD levels for them * for( let i = 0; i < 3; i++ ) { * * const geometry = new THREE.IcosahedronGeometry( 10, 3 - i ); * const mesh = new THREE.Mesh( geometry, material ); * lod.addLevel( mesh, i * 75 ); * * } * * scene.add( lod ); * ``` * * @augments Object3D */ class LOD extends Object3D { /** * Constructs a new LOD. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLOD = true; /** * The current LOD index. * * @private * @type {number} * @default 0 */ this._currentLevel = 0; this.type = 'LOD'; Object.defineProperties( this, { /** * This array holds the LOD levels. * * @name LOD#levels * @type {Array<{object:Object3D,distance:number,hysteresis:number}>} */ levels: { enumerable: true, value: [] } } ); /** * Whether the LOD object is updated automatically by the renderer per frame * or not. If set to `false`, you have to call {@link LOD#update} in the * render loop by yourself. * * @type {boolean} * @default true */ this.autoUpdate = true; } copy( source ) { super.copy( source, false ); const levels = source.levels; for ( let i = 0, l = levels.length; i < l; i ++ ) { const level = levels[ i ]; this.addLevel( level.object.clone(), level.distance, level.hysteresis ); } this.autoUpdate = source.autoUpdate; return this; } /** * Adds a mesh that will display at a certain distance and greater. Typically * the further away the distance, the lower the detail on the mesh. * * @param {Object3D} object - The 3D object to display at this level. * @param {number} [distance=0] - The distance at which to display this level of detail. * @param {number} [hysteresis=0] - Threshold used to avoid flickering at LOD boundaries, as a fraction of distance. * @return {LOD} A reference to this instance. */ addLevel( object, distance = 0, hysteresis = 0 ) { distance = Math.abs( distance ); const levels = this.levels; let l; for ( l = 0; l < levels.length; l ++ ) { if ( distance < levels[ l ].distance ) { break; } } levels.splice( l, 0, { distance: distance, hysteresis: hysteresis, object: object } ); this.add( object ); return this; } /** * Removes an existing level, based on the distance from the camera. * Returns `true` when the level has been removed. Otherwise `false`. * * @param {number} distance - Distance of the level to remove. * @return {boolean} Whether the level has been removed or not. */ removeLevel( distance ) { const levels = this.levels; for ( let i = 0; i < levels.length; i ++ ) { if ( levels[ i ].distance === distance ) { const removedElements = levels.splice( i, 1 ); this.remove( removedElements[ 0 ].object ); return true; } } return false; } /** * Returns the currently active LOD level index. * * @return {number} The current active LOD level index. */ getCurrentLevel() { return this._currentLevel; } /** * Returns a reference to the first 3D object that is greater than * the given distance. * * @param {number} distance - The LOD distance. * @return {Object3D|null} The found 3D object. `null` if no 3D object has been found. 
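*
* A usage sketch (assuming `lod` already has levels registered via {@link LOD#addLevel}):
* ```js
* const object = lod.getObjectForDistance( 100 ); // 3D object of the level that covers a distance of 100 units
* ```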
*/ getObjectForDistance( distance ) { const levels = this.levels; if ( levels.length > 0 ) { let i, l; for ( i = 1, l = levels.length; i < l; i ++ ) { let levelDistance = levels[ i ].distance; if ( levels[ i ].object.visible ) { levelDistance -= levelDistance * levels[ i ].hysteresis; } if ( distance < levelDistance ) { break; } } return levels[ i - 1 ].object; } return null; } /** * Computes intersection points between a casted ray and this LOD. * * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - The target array that holds the intersection points. */ raycast( raycaster, intersects ) { const levels = this.levels; if ( levels.length > 0 ) { _v1$2.setFromMatrixPosition( this.matrixWorld ); const distance = raycaster.ray.origin.distanceTo( _v1$2 ); this.getObjectForDistance( distance ).raycast( raycaster, intersects ); } } /** * Updates the LOD by computing which LOD level should be visible according * to the current distance of the given camera. * * @param {Camera} camera - The camera the scene is rendered with. */ update( camera ) { const levels = this.levels; if ( levels.length > 1 ) { _v1$2.setFromMatrixPosition( camera.matrixWorld ); _v2$1.setFromMatrixPosition( this.matrixWorld ); const distance = _v1$2.distanceTo( _v2$1 ) / camera.zoom; levels[ 0 ].object.visible = true; let i, l; for ( i = 1, l = levels.length; i < l; i ++ ) { let levelDistance = levels[ i ].distance; if ( levels[ i ].object.visible ) { levelDistance -= levelDistance * levels[ i ].hysteresis; } if ( distance >= levelDistance ) { levels[ i - 1 ].object.visible = false; levels[ i ].object.visible = true; } else { break; } } this._currentLevel = i - 1; for ( ; i < l; i ++ ) { levels[ i ].object.visible = false; } } } toJSON( meta ) { const data = super.toJSON( meta ); if ( this.autoUpdate === false ) data.object.autoUpdate = false; data.object.levels = []; const levels = this.levels; for ( let i = 0, l = levels.length; i < l; i ++ ) { const level = levels[ i ]; data.object.levels.push( { object: level.object.uuid, distance: level.distance, hysteresis: level.hysteresis } ); } return data; } } const _basePosition = /*@__PURE__*/ new Vector3(); const _skinIndex = /*@__PURE__*/ new Vector4(); const _skinWeight = /*@__PURE__*/ new Vector4(); const _vector3 = /*@__PURE__*/ new Vector3(); const _matrix4 = /*@__PURE__*/ new Matrix4(); const _vertex = /*@__PURE__*/ new Vector3(); const _sphere$5 = /*@__PURE__*/ new Sphere(); const _inverseMatrix$2 = /*@__PURE__*/ new Matrix4(); const _ray$2 = /*@__PURE__*/ new Ray(); /** * A mesh that has a {@link Skeleton} that can then be used to animate the * vertices of the geometry with skinning/skeleton animation. * * Next to a valid skeleton, the skinned mesh requires skin indices and weights * as buffer attributes in its geometry. These attribute define which bones affect a single * vertex to a certain extend. * * Typically skinned meshes are not created manually but loaders like {@link GLTFLoader} * or {@link FBXLoader } import respective models. * * @augments Mesh */ class SkinnedMesh extends Mesh { /** * Constructs a new skinned mesh. * * @param {BufferGeometry} [geometry] - The mesh geometry. * @param {Material|Array} [material] - The mesh material. */ constructor( geometry, material ) { super( geometry, material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSkinnedMesh = true; this.type = 'SkinnedMesh'; /** * `AttachedBindMode` means the skinned mesh shares the same world space as the skeleton. 
* This is not true when using `DetachedBindMode` which is useful when sharing a skeleton * across multiple skinned meshes. * * @type {(AttachedBindMode|DetachedBindMode)} * @default AttachedBindMode */ this.bindMode = AttachedBindMode; /** * The base matrix that is used for the bound bone transforms. * * @type {Matrix4} */ this.bindMatrix = new Matrix4(); /** * The base matrix that is used for resetting the bound bone transforms. * * @type {Matrix4} */ this.bindMatrixInverse = new Matrix4(); /** * The bounding box of the skinned mesh. Can be computed via {@link SkinnedMesh#computeBoundingBox}. * * @type {?Box3} * @default null */ this.boundingBox = null; /** * The bounding sphere of the skinned mesh. Can be computed via {@link SkinnedMesh#computeBoundingSphere}. * * @type {?Sphere} * @default null */ this.boundingSphere = null; } /** * Computes the bounding box of the skinned mesh, and updates {@link SkinnedMesh#boundingBox}. * The bounding box is not automatically computed by the engine; this method must be called by your app. * If the skinned mesh is animated, the bounding box should be recomputed per frame in order to reflect * the current animation state. */ computeBoundingBox() { const geometry = this.geometry; if ( this.boundingBox === null ) { this.boundingBox = new Box3(); } this.boundingBox.makeEmpty(); const positionAttribute = geometry.getAttribute( 'position' ); for ( let i = 0; i < positionAttribute.count; i ++ ) { this.getVertexPosition( i, _vertex ); this.boundingBox.expandByPoint( _vertex ); } } /** * Computes the bounding sphere of the skinned mesh, and updates {@link SkinnedMesh#boundingSphere}. * The bounding sphere is automatically computed by the engine once when it is needed, e.g., for ray casting * and view frustum culling. If the skinned mesh is animated, the bounding sphere should be recomputed * per frame in order to reflect the current animation state. 
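*
* A per-frame sketch (assuming `skinnedMesh` is an animated instance updated in the render loop):
* ```js
* skinnedMesh.computeBoundingSphere(); // keeps raycasting and culling in sync with the current pose
* ```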
*/ computeBoundingSphere() { const geometry = this.geometry; if ( this.boundingSphere === null ) { this.boundingSphere = new Sphere(); } this.boundingSphere.makeEmpty(); const positionAttribute = geometry.getAttribute( 'position' ); for ( let i = 0; i < positionAttribute.count; i ++ ) { this.getVertexPosition( i, _vertex ); this.boundingSphere.expandByPoint( _vertex ); } } copy( source, recursive ) { super.copy( source, recursive ); this.bindMode = source.bindMode; this.bindMatrix.copy( source.bindMatrix ); this.bindMatrixInverse.copy( source.bindMatrixInverse ); this.skeleton = source.skeleton; if ( source.boundingBox !== null ) this.boundingBox = source.boundingBox.clone(); if ( source.boundingSphere !== null ) this.boundingSphere = source.boundingSphere.clone(); return this; } raycast( raycaster, intersects ) { const material = this.material; const matrixWorld = this.matrixWorld; if ( material === undefined ) return; // test with bounding sphere in world space if ( this.boundingSphere === null ) this.computeBoundingSphere(); _sphere$5.copy( this.boundingSphere ); _sphere$5.applyMatrix4( matrixWorld ); if ( raycaster.ray.intersectsSphere( _sphere$5 ) === false ) return; // convert ray to local space of skinned mesh _inverseMatrix$2.copy( matrixWorld ).invert(); _ray$2.copy( raycaster.ray ).applyMatrix4( _inverseMatrix$2 ); // test with bounding box in local space if ( this.boundingBox !== null ) { if ( _ray$2.intersectsBox( this.boundingBox ) === false ) return; } // test for intersections with geometry this._computeIntersections( raycaster, intersects, _ray$2 ); } getVertexPosition( index, target ) { super.getVertexPosition( index, target ); this.applyBoneTransform( index, target ); return target; } /** * Binds the given skeleton to the skinned mesh. * * @param {Skeleton} skeleton - The skeleton to bind. * @param {Matrix4} [bindMatrix] - The bind matrix. If no bind matrix is provided, * the skinned mesh's world matrix will be used instead. */ bind( skeleton, bindMatrix ) { this.skeleton = skeleton; if ( bindMatrix === undefined ) { this.updateMatrixWorld( true ); this.skeleton.calculateInverses(); bindMatrix = this.matrixWorld; } this.bindMatrix.copy( bindMatrix ); this.bindMatrixInverse.copy( bindMatrix ).invert(); } /** * This method sets the skinned mesh in the rest pose). */ pose() { this.skeleton.pose(); } /** * Normalizes the skin weights which are defined as a buffer attribute * in the skinned mesh's geometry. */ normalizeSkinWeights() { const vector = new Vector4(); const skinWeight = this.geometry.attributes.skinWeight; for ( let i = 0, l = skinWeight.count; i < l; i ++ ) { vector.fromBufferAttribute( skinWeight, i ); const scale = 1.0 / vector.manhattanLength(); if ( scale !== Infinity ) { vector.multiplyScalar( scale ); } else { vector.set( 1, 0, 0, 0 ); // do something reasonable } skinWeight.setXYZW( i, vector.x, vector.y, vector.z, vector.w ); } } updateMatrixWorld( force ) { super.updateMatrixWorld( force ); if ( this.bindMode === AttachedBindMode ) { this.bindMatrixInverse.copy( this.matrixWorld ).invert(); } else if ( this.bindMode === DetachedBindMode ) { this.bindMatrixInverse.copy( this.bindMatrix ).invert(); } else { console.warn( 'THREE.SkinnedMesh: Unrecognized bindMode: ' + this.bindMode ); } } /** * Applies the bone transform associated with the given index to the given * vertex position. Returns the updated vector. * * @param {number} index - The vertex index. * @param {Vector3} target - The target object that is used to store the method's result. 
* @return {Vector3} The updated vertex position. */ applyBoneTransform( index, target ) { const skeleton = this.skeleton; const geometry = this.geometry; _skinIndex.fromBufferAttribute( geometry.attributes.skinIndex, index ); _skinWeight.fromBufferAttribute( geometry.attributes.skinWeight, index ); _basePosition.copy( target ).applyMatrix4( this.bindMatrix ); target.set( 0, 0, 0 ); for ( let i = 0; i < 4; i ++ ) { const weight = _skinWeight.getComponent( i ); if ( weight !== 0 ) { const boneIndex = _skinIndex.getComponent( i ); _matrix4.multiplyMatrices( skeleton.bones[ boneIndex ].matrixWorld, skeleton.boneInverses[ boneIndex ] ); target.addScaledVector( _vector3.copy( _basePosition ).applyMatrix4( _matrix4 ), weight ); } } return target.applyMatrix4( this.bindMatrixInverse ); } } /** * A bone which is part of a {@link Skeleton}. The skeleton in turn is used by * the {@link SkinnedMesh}. * * ```js * const root = new THREE.Bone(); * const child = new THREE.Bone(); * * root.add( child ); * child.position.y = 5; * ``` * * @augments Object3D */ class Bone extends Object3D { /** * Constructs a new bone. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBone = true; this.type = 'Bone'; } } /** * Creates a texture directly from raw buffer data. * * The interpretation of the data depends on type and format: If the type is * `UnsignedByteType`, a `Uint8Array` will be useful for addressing the * texel data. If the format is `RGBAFormat`, data needs four values for * one texel; Red, Green, Blue and Alpha (typically the opacity). * * @augments Texture */ class DataTexture extends Texture { /** * Constructs a new data texture. * * @param {?TypedArray} [data=null] - The buffer data. * @param {number} [width=1] - The width of the texture. * @param {number} [height=1] - The height of the texture. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=NearestFilter] - The mag filter value. * @param {number} [minFilter=NearestFilter] - The min filter value. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. * @param {string} [colorSpace=NoColorSpace] - The color space. */ constructor( data = null, width = 1, height = 1, format, type, mapping, wrapS, wrapT, magFilter = NearestFilter, minFilter = NearestFilter, anisotropy, colorSpace ) { super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isDataTexture = true; /** * The image definition of a data texture. * * @type {{data:TypedArray,width:number,height:number}} */ this.image = { data: data, width: width, height: height }; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default.
* * @type {boolean} * @default false */ this.flipY = false; /** * Specifies the alignment requirements for the start of each pixel row in memory. * * Overwritten and set to `1` by default. * * @type {number} * @default 1 */ this.unpackAlignment = 1; } } const _offsetMatrix = /*@__PURE__*/ new Matrix4(); const _identityMatrix = /*@__PURE__*/ new Matrix4(); /** * Class for representing the armatures in `three.js`. The skeleton * is defined by a hierarchy of bones. * * ```js * const bones = []; * * const shoulder = new THREE.Bone(); * const elbow = new THREE.Bone(); * const hand = new THREE.Bone(); * * shoulder.add( elbow ); * elbow.add( hand ); * * bones.push( shoulder, elbow, hand ); * * shoulder.position.y = -5; * elbow.position.y = 0; * hand.position.y = 5; * * const armSkeleton = new THREE.Skeleton( bones ); * ``` */ class Skeleton { /** * Constructs a new skeleton. * * @param {Array} [bones] - An array of bones. * @param {Array} [boneInverses] - An array of bone inverse matrices. * If not provided, these matrices will be computed automatically via {@link Skeleton#calculateInverses}. */ constructor( bones = [], boneInverses = [] ) { this.uuid = generateUUID(); /** * An array of bones defining the skeleton. * * @type {Array} */ this.bones = bones.slice( 0 ); /** * An array of bone inverse matrices. * * @type {Array} */ this.boneInverses = boneInverses; /** * An array buffer holding the bone data. * Input data for {@link Skeleton#boneTexture}. * * @type {?Float32Array} * @default null */ this.boneMatrices = null; /** * A texture holding the bone data for use * in the vertex shader. * * @type {?DataTexture} * @default null */ this.boneTexture = null; this.init(); } /** * Initializes the skeleton. This method gets automatically called by the constructor * but depending on how the skeleton is created it might be necessary to call this method * manually. */ init() { const bones = this.bones; const boneInverses = this.boneInverses; this.boneMatrices = new Float32Array( bones.length * 16 ); // calculate inverse bone matrices if necessary if ( boneInverses.length === 0 ) { this.calculateInverses(); } else { // handle special case if ( bones.length !== boneInverses.length ) { console.warn( 'THREE.Skeleton: Number of inverse bone matrices does not match amount of bones.' ); this.boneInverses = []; for ( let i = 0, il = this.bones.length; i < il; i ++ ) { this.boneInverses.push( new Matrix4() ); } } } } /** * Computes the bone inverse matrices. This method resets {@link Skeleton#boneInverses} * and fills it with new matrices. */ calculateInverses() { this.boneInverses.length = 0; for ( let i = 0, il = this.bones.length; i < il; i ++ ) { const inverse = new Matrix4(); if ( this.bones[ i ] ) { inverse.copy( this.bones[ i ].matrixWorld ).invert(); } this.boneInverses.push( inverse ); } } /** * Resets the skeleton to the base pose.
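*
* A usage sketch (assuming `mesh` is a {@link SkinnedMesh} with a bound skeleton):
* ```js
* mesh.skeleton.pose(); // restores the bind pose
* ```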
*/ pose() { // recover the bind-time world matrices for ( let i = 0, il = this.bones.length; i < il; i ++ ) { const bone = this.bones[ i ]; if ( bone ) { bone.matrixWorld.copy( this.boneInverses[ i ] ).invert(); } } // compute the local matrices, positions, rotations and scales for ( let i = 0, il = this.bones.length; i < il; i ++ ) { const bone = this.bones[ i ]; if ( bone ) { if ( bone.parent && bone.parent.isBone ) { bone.matrix.copy( bone.parent.matrixWorld ).invert(); bone.matrix.multiply( bone.matrixWorld ); } else { bone.matrix.copy( bone.matrixWorld ); } bone.matrix.decompose( bone.position, bone.quaternion, bone.scale ); } } } /** * Updates the bone matrices so they reflect the current world transforms of the bones * and, if present, flags {@link Skeleton#boneTexture} for re-upload to the GPU. */ update() { const bones = this.bones; const boneInverses = this.boneInverses; const boneMatrices = this.boneMatrices; const boneTexture = this.boneTexture; // flatten bone matrices to array for ( let i = 0, il = bones.length; i < il; i ++ ) { // compute the offset between the current and the original transform const matrix = bones[ i ] ? bones[ i ].matrixWorld : _identityMatrix; _offsetMatrix.multiplyMatrices( matrix, boneInverses[ i ] ); _offsetMatrix.toArray( boneMatrices, i * 16 ); } if ( boneTexture !== null ) { boneTexture.needsUpdate = true; } } /** * Returns a new skeleton with copied values from this instance. * * @return {Skeleton} A clone of this instance. */ clone() { return new Skeleton( this.bones, this.boneInverses ); } /** * Computes a data texture for passing bone data to the vertex shader. * * @return {Skeleton} A reference to this instance. */ computeBoneTexture() { // layout (1 matrix = 4 pixels) // RGBA RGBA RGBA RGBA (=> column1, column2, column3, column4) // with 8x8 pixel texture max 16 bones * 4 pixels = (8 * 8) // 16x16 pixel texture max 64 bones * 4 pixels = (16 * 16) // 32x32 pixel texture max 256 bones * 4 pixels = (32 * 32) // 64x64 pixel texture max 1024 bones * 4 pixels = (64 * 64) let size = Math.sqrt( this.bones.length * 4 ); // 4 pixels needed for 1 matrix size = Math.ceil( size / 4 ) * 4; size = Math.max( size, 4 ); const boneMatrices = new Float32Array( size * size * 4 ); // 4 floats per RGBA pixel boneMatrices.set( this.boneMatrices ); // copy current values const boneTexture = new DataTexture( boneMatrices, size, size, RGBAFormat, FloatType ); boneTexture.needsUpdate = true; this.boneMatrices = boneMatrices; this.boneTexture = boneTexture; return this; } /** * Searches through the skeleton's bone array and returns the first with a * matching name. * * @param {string} name - The name of the bone. * @return {Bone|undefined} The found bone. `undefined` if no bone has been found. */ getBoneByName( name ) { for ( let i = 0, il = this.bones.length; i < il; i ++ ) { const bone = this.bones[ i ]; if ( bone.name === name ) { return bone; } } return undefined; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { if ( this.boneTexture !== null ) { this.boneTexture.dispose(); this.boneTexture = null; } } /** * Sets up the skeleton from the given JSON and bones. * * @param {Object} json - The skeleton as serialized JSON. * @param {Object} bones - An array of bones. * @return {Skeleton} A reference to this instance.
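*
* A usage sketch (assuming `json` was produced by {@link Skeleton#toJSON} and `bones` maps bone UUIDs to {@link Bone} instances):
* ```js
* const skeleton = new THREE.Skeleton().fromJSON( json, bones );
* ```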
*/ fromJSON( json, bones ) { this.uuid = json.uuid; for ( let i = 0, l = json.bones.length; i < l; i ++ ) { const uuid = json.bones[ i ]; let bone = bones[ uuid ]; if ( bone === undefined ) { console.warn( 'THREE.Skeleton: No bone found with UUID:', uuid ); bone = new Bone(); } this.bones.push( bone ); this.boneInverses.push( new Matrix4().fromArray( json.boneInverses[ i ] ) ); } this.init(); return this; } /** * Serializes the skeleton into JSON. * * @return {Object} A JSON object representing the serialized skeleton. * @see {@link ObjectLoader#parse} */ toJSON() { const data = { metadata: { version: 4.6, type: 'Skeleton', generator: 'Skeleton.toJSON' }, bones: [], boneInverses: [] }; data.uuid = this.uuid; const bones = this.bones; const boneInverses = this.boneInverses; for ( let i = 0, l = bones.length; i < l; i ++ ) { const bone = bones[ i ]; data.bones.push( bone.uuid ); const boneInverse = boneInverses[ i ]; data.boneInverses.push( boneInverse.toArray() ); } return data; } } /** * An instanced version of a buffer attribute. * * @augments BufferAttribute */ class InstancedBufferAttribute extends BufferAttribute { /** * Constructs a new instanced buffer attribute. * * @param {TypedArray} array - The array holding the attribute data. * @param {number} itemSize - The item size. * @param {boolean} [normalized=false] - Whether the data are normalized or not. * @param {number} [meshPerAttribute=1] - How often a value of this buffer attribute should be repeated. */ constructor( array, itemSize, normalized, meshPerAttribute = 1 ) { super( array, itemSize, normalized ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInstancedBufferAttribute = true; /** * Defines how often a value of this buffer attribute should be repeated. A * value of one means that each value of the instanced attribute is used for * a single instance. A value of two means that each value is used for two * consecutive instances (and so on). * * @type {number} * @default 1 */ this.meshPerAttribute = meshPerAttribute; } copy( source ) { super.copy( source ); this.meshPerAttribute = source.meshPerAttribute; return this; } toJSON() { const data = super.toJSON(); data.meshPerAttribute = this.meshPerAttribute; data.isInstancedBufferAttribute = true; return data; } } const _instanceLocalMatrix = /*@__PURE__*/ new Matrix4(); const _instanceWorldMatrix = /*@__PURE__*/ new Matrix4(); const _instanceIntersects = []; const _box3 = /*@__PURE__*/ new Box3(); const _identity = /*@__PURE__*/ new Matrix4(); const _mesh$1 = /*@__PURE__*/ new Mesh(); const _sphere$4 = /*@__PURE__*/ new Sphere(); /** * A special version of a mesh with instanced rendering support. Use * this class if you have to render a large number of objects with the same * geometry and material(s) but with different world transformations. The usage * of 'InstancedMesh' will help you to reduce the number of draw calls and thus * improve the overall rendering performance in your application. * * @augments Mesh */ class InstancedMesh extends Mesh { /** * Constructs a new instanced mesh. * * @param {BufferGeometry} [geometry] - The mesh geometry. * @param {Material|Array} [material] - The mesh material. * @param {number} count - The number of instances. */ constructor( geometry, material, count ) { super( geometry, material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInstancedMesh = true; /** * Represents the local transformation of all instances. 
You have to set its * {@link BufferAttribute#needsUpdate} flag to true if you modify instanced data * via {@link InstancedMesh#setMatrixAt}. * * @type {InstancedBufferAttribute} */ this.instanceMatrix = new InstancedBufferAttribute( new Float32Array( count * 16 ), 16 ); /** * Represents the color of all instances. You have to set its * {@link BufferAttribute#needsUpdate} flag to true if you modify instanced data * via {@link InstancedMesh#setColorAt}. * * @type {?InstancedBufferAttribute} * @default null */ this.instanceColor = null; /** * Represents the morph target weights of all instances. You have to set its * {@link Texture#needsUpdate} flag to true if you modify instanced data * via {@link InstancedMesh#setMorphAt}. * * @type {?DataTexture} * @default null */ this.morphTexture = null; /** * The number of instances. * * @type {number} */ this.count = count; /** * The bounding box of the instanced mesh. Can be computed via {@link InstancedMesh#computeBoundingBox}. * * @type {?Box3} * @default null */ this.boundingBox = null; /** * The bounding sphere of the instanced mesh. Can be computed via {@link InstancedMesh#computeBoundingSphere}. * * @type {?Sphere} * @default null */ this.boundingSphere = null; for ( let i = 0; i < count; i ++ ) { this.setMatrixAt( i, _identity ); } } /** * Computes the bounding box of the instanced mesh, and updates {@link InstancedMesh#boundingBox}. * The bounding box is not automatically computed by the engine; this method must be called by your app. * You may need to recompute the bounding box if an instance is transformed via {@link InstancedMesh#setMatrixAt}. */ computeBoundingBox() { const geometry = this.geometry; const count = this.count; if ( this.boundingBox === null ) { this.boundingBox = new Box3(); } if ( geometry.boundingBox === null ) { geometry.computeBoundingBox(); } this.boundingBox.makeEmpty(); for ( let i = 0; i < count; i ++ ) { this.getMatrixAt( i, _instanceLocalMatrix ); _box3.copy( geometry.boundingBox ).applyMatrix4( _instanceLocalMatrix ); this.boundingBox.union( _box3 ); } } /** * Computes the bounding sphere of the instanced mesh, and updates {@link InstancedMesh#boundingSphere} * The engine automatically computes the bounding sphere when it is needed, e.g., for ray casting or view frustum culling. * You may need to recompute the bounding sphere if an instance is transformed via {@link InstancedMesh#setMatrixAt}. */ computeBoundingSphere() { const geometry = this.geometry; const count = this.count; if ( this.boundingSphere === null ) { this.boundingSphere = new Sphere(); } if ( geometry.boundingSphere === null ) { geometry.computeBoundingSphere(); } this.boundingSphere.makeEmpty(); for ( let i = 0; i < count; i ++ ) { this.getMatrixAt( i, _instanceLocalMatrix ); _sphere$4.copy( geometry.boundingSphere ).applyMatrix4( _instanceLocalMatrix ); this.boundingSphere.union( _sphere$4 ); } } copy( source, recursive ) { super.copy( source, recursive ); this.instanceMatrix.copy( source.instanceMatrix ); if ( source.morphTexture !== null ) this.morphTexture = source.morphTexture.clone(); if ( source.instanceColor !== null ) this.instanceColor = source.instanceColor.clone(); this.count = source.count; if ( source.boundingBox !== null ) this.boundingBox = source.boundingBox.clone(); if ( source.boundingSphere !== null ) this.boundingSphere = source.boundingSphere.clone(); return this; } /** * Gets the color of the defined instance. * * @param {number} index - The instance index. 
* @param {Color} color - The target object that is used to store the method's result. */ getColorAt( index, color ) { color.fromArray( this.instanceColor.array, index * 3 ); } /** * Gets the local transformation matrix of the defined instance. * * @param {number} index - The instance index. * @param {Matrix4} matrix - The target object that is used to store the method's result. */ getMatrixAt( index, matrix ) { matrix.fromArray( this.instanceMatrix.array, index * 16 ); } /** * Gets the morph target weights of the defined instance. * * @param {number} index - The instance index. * @param {Mesh} object - The target object that is used to store the method's result. */ getMorphAt( index, object ) { const objectInfluences = object.morphTargetInfluences; const array = this.morphTexture.source.data.data; const len = objectInfluences.length + 1; // All influences + the baseInfluenceSum const dataIndex = index * len + 1; // Skip the baseInfluenceSum at the beginning for ( let i = 0; i < objectInfluences.length; i ++ ) { objectInfluences[ i ] = array[ dataIndex + i ]; } } raycast( raycaster, intersects ) { const matrixWorld = this.matrixWorld; const raycastTimes = this.count; _mesh$1.geometry = this.geometry; _mesh$1.material = this.material; if ( _mesh$1.material === undefined ) return; // test with bounding sphere first if ( this.boundingSphere === null ) this.computeBoundingSphere(); _sphere$4.copy( this.boundingSphere ); _sphere$4.applyMatrix4( matrixWorld ); if ( raycaster.ray.intersectsSphere( _sphere$4 ) === false ) return; // now test each instance for ( let instanceId = 0; instanceId < raycastTimes; instanceId ++ ) { // calculate the world matrix for each instance this.getMatrixAt( instanceId, _instanceLocalMatrix ); _instanceWorldMatrix.multiplyMatrices( matrixWorld, _instanceLocalMatrix ); // the mesh represents this single instance _mesh$1.matrixWorld = _instanceWorldMatrix; _mesh$1.raycast( raycaster, _instanceIntersects ); // process the result of raycast for ( let i = 0, l = _instanceIntersects.length; i < l; i ++ ) { const intersect = _instanceIntersects[ i ]; intersect.instanceId = instanceId; intersect.object = this; intersects.push( intersect ); } _instanceIntersects.length = 0; } } /** * Sets the given color to the defined instance. Make sure you set the `needsUpdate` flag of * {@link InstancedMesh#instanceColor} to `true` after updating all the colors. * * @param {number} index - The instance index. * @param {Color} color - The instance color. */ setColorAt( index, color ) { if ( this.instanceColor === null ) { this.instanceColor = new InstancedBufferAttribute( new Float32Array( this.instanceMatrix.count * 3 ).fill( 1 ), 3 ); } color.toArray( this.instanceColor.array, index * 3 ); } /** * Sets the given local transformation matrix to the defined instance. Make sure you set the `needsUpdate` flag of * {@link InstancedMesh#instanceMatrix} to `true` after updating all the matrices. * * @param {number} index - The instance index. * @param {Matrix4} matrix - The local transformation. */ setMatrixAt( index, matrix ) { matrix.toArray( this.instanceMatrix.array, index * 16 ); } /** * Sets the morph target weights to the defined instance. Make sure you set the `needsUpdate` flag of * {@link InstancedMesh#morphTexture} to `true` after updating all the influences. * * @param {number} index - The instance index. * @param {Mesh} object - A mesh whose `morphTargetInfluences` property contains the morph target weights * of a single instance.
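*
* A usage sketch (assuming `instancedMesh` is this instance, `i` a valid instance index and `dummy` a
* {@link Mesh} that shares the instanced geometry and its morph targets):
* ```js
* dummy.morphTargetInfluences[ 0 ] = 0.5;
* instancedMesh.setMorphAt( i, dummy );
* instancedMesh.morphTexture.needsUpdate = true;
* ```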
*/ setMorphAt( index, object ) { const objectInfluences = object.morphTargetInfluences; const len = objectInfluences.length + 1; // morphBaseInfluence + all influences if ( this.morphTexture === null ) { this.morphTexture = new DataTexture( new Float32Array( len * this.count ), len, this.count, RedFormat, FloatType ); } const array = this.morphTexture.source.data.data; let morphInfluencesSum = 0; for ( let i = 0; i < objectInfluences.length; i ++ ) { morphInfluencesSum += objectInfluences[ i ]; } const morphBaseInfluence = this.geometry.morphTargetsRelative ? 1 : 1 - morphInfluencesSum; const dataIndex = len * index; array[ dataIndex ] = morphBaseInfluence; array.set( objectInfluences, dataIndex + 1 ); } updateMorphTargets() { } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.dispatchEvent( { type: 'dispose' } ); if ( this.morphTexture !== null ) { this.morphTexture.dispose(); this.morphTexture = null; } } } const _vector1 = /*@__PURE__*/ new Vector3(); const _vector2 = /*@__PURE__*/ new Vector3(); const _normalMatrix = /*@__PURE__*/ new Matrix3(); /** * A two dimensional surface that extends infinitely in 3D space, represented * in [Hessian normal form]{@link http://mathworld.wolfram.com/HessianNormalForm.html} * by a unit length normal vector and a constant. */ class Plane { /** * Constructs a new plane. * * @param {Vector3} [normal=(1,0,0)] - A unit length vector defining the normal of the plane. * @param {number} [constant=0] - The signed distance from the origin to the plane. */ constructor( normal = new Vector3( 1, 0, 0 ), constant = 0 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isPlane = true; /** * A unit length vector defining the normal of the plane. * * @type {Vector3} */ this.normal = normal; /** * The signed distance from the origin to the plane. * * @type {number} * @default 0 */ this.constant = constant; } /** * Sets the plane components by copying the given values. * * @param {Vector3} normal - The normal. * @param {number} constant - The constant. * @return {Plane} A reference to this plane. */ set( normal, constant ) { this.normal.copy( normal ); this.constant = constant; return this; } /** * Sets the plane components by defining `x`, `y`, `z` as the * plane normal and `w` as the constant. * * @param {number} x - The value for the normal's x component. * @param {number} y - The value for the normal's y component. * @param {number} z - The value for the normal's z component. * @param {number} w - The constant value. * @return {Plane} A reference to this plane. */ setComponents( x, y, z, w ) { this.normal.set( x, y, z ); this.constant = w; return this; } /** * Sets the plane from the given normal and coplanar point (that is a point * that lies onto the plane). * * @param {Vector3} normal - The normal. * @param {Vector3} point - A coplanar point. * @return {Plane} A reference to this plane. */ setFromNormalAndCoplanarPoint( normal, point ) { this.normal.copy( normal ); this.constant = - point.dot( this.normal ); return this; } /** * Sets the plane from three coplanar points. The winding order is * assumed to be counter-clockwise, and determines the direction of * the plane normal. * * @param {Vector3} a - The first coplanar point. * @param {Vector3} b - The second coplanar point. * @param {Vector3} c - The third coplanar point. * @return {Plane} A reference to this plane. 
*/ setFromCoplanarPoints( a, b, c ) { const normal = _vector1.subVectors( c, b ).cross( _vector2.subVectors( a, b ) ).normalize(); // Q: should an error be thrown if normal is zero (e.g. degenerate plane)? this.setFromNormalAndCoplanarPoint( normal, a ); return this; } /** * Copies the values of the given plane to this instance. * * @param {Plane} plane - The plane to copy. * @return {Plane} A reference to this plane. */ copy( plane ) { this.normal.copy( plane.normal ); this.constant = plane.constant; return this; } /** * Normalizes the plane normal and adjusts the constant accordingly. * * @return {Plane} A reference to this plane. */ normalize() { // Note: will lead to a divide by zero if the plane is invalid. const inverseNormalLength = 1.0 / this.normal.length(); this.normal.multiplyScalar( inverseNormalLength ); this.constant *= inverseNormalLength; return this; } /** * Negates both the plane normal and the constant. * * @return {Plane} A reference to this plane. */ negate() { this.constant *= -1; this.normal.negate(); return this; } /** * Returns the signed distance from the given point to this plane. * * @param {Vector3} point - The point to compute the distance for. * @return {number} The signed distance. */ distanceToPoint( point ) { return this.normal.dot( point ) + this.constant; } /** * Returns the signed distance from the given sphere to this plane. * * @param {Sphere} sphere - The sphere to compute the distance for. * @return {number} The signed distance. */ distanceToSphere( sphere ) { return this.distanceToPoint( sphere.center ) - sphere.radius; } /** * Projects a the given point onto the plane. * * @param {Vector3} point - The point to project. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The projected point on the plane. */ projectPoint( point, target ) { return target.copy( point ).addScaledVector( this.normal, - this.distanceToPoint( point ) ); } /** * Returns the intersection point of the passed line and the plane. Returns * `null` if the line does not intersect. Returns the line's starting point if * the line is coplanar with the plane. * * @param {Line3} line - The line to compute the intersection for. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {?Vector3} The intersection point. */ intersectLine( line, target ) { const direction = line.delta( _vector1 ); const denominator = this.normal.dot( direction ); if ( denominator === 0 ) { // line is coplanar, return origin if ( this.distanceToPoint( line.start ) === 0 ) { return target.copy( line.start ); } // Unsure if this is the correct method to handle this case. return null; } const t = - ( line.start.dot( this.normal ) + this.constant ) / denominator; if ( t < 0 || t > 1 ) { return null; } return target.copy( line.start ).addScaledVector( direction, t ); } /** * Returns `true` if the given line segment intersects with (passes through) the plane. * * @param {Line3} line - The line to test. * @return {boolean} Whether the given line segment intersects with the plane or not. */ intersectsLine( line ) { // Note: this tests if a line intersects the plane, not whether it (or its end-points) are coplanar with it. const startSign = this.distanceToPoint( line.start ); const endSign = this.distanceToPoint( line.end ); return ( startSign < 0 && endSign > 0 ) || ( endSign < 0 && startSign > 0 ); } /** * Returns `true` if the given bounding box intersects with the plane. 
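*
* A usage sketch (assuming `plane` is an existing instance):
* ```js
* const box = new THREE.Box3( new THREE.Vector3( - 1, - 1, - 1 ), new THREE.Vector3( 1, 1, 1 ) );
* const intersects = plane.intersectsBox( box );
* ```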
* * @param {Box3} box - The bounding box to test. * @return {boolean} Whether the given bounding box intersects with the plane or not. */ intersectsBox( box ) { return box.intersectsPlane( this ); } /** * Returns `true` if the given bounding sphere intersects with the plane. * * @param {Sphere} sphere - The bounding sphere to test. * @return {boolean} Whether the given bounding sphere intersects with the plane or not. */ intersectsSphere( sphere ) { return sphere.intersectsPlane( this ); } /** * Returns a coplanar vector to the plane, by calculating the * projection of the normal at the origin onto the plane. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The coplanar point. */ coplanarPoint( target ) { return target.copy( this.normal ).multiplyScalar( - this.constant ); } /** * Applies a 4x4 matrix to the plane. The matrix must be an affine, homogeneous transform. * * The optional normal matrix can be pre-computed like so: * ```js * const optionalNormalMatrix = new THREE.Matrix3().getNormalMatrix( matrix ); * ``` * * @param {Matrix4} matrix - The transformation matrix. * @param {Matrix3} [optionalNormalMatrix] - A pre-computed normal matrix. * @return {Plane} A reference to this plane. */ applyMatrix4( matrix, optionalNormalMatrix ) { const normalMatrix = optionalNormalMatrix || _normalMatrix.getNormalMatrix( matrix ); const referencePoint = this.coplanarPoint( _vector1 ).applyMatrix4( matrix ); const normal = this.normal.applyMatrix3( normalMatrix ).normalize(); this.constant = - referencePoint.dot( normal ); return this; } /** * Translates the plane by the distance defined by the given offset vector. * Note that this only affects the plane constant and will not affect the normal vector. * * @param {Vector3} offset - The offset vector. * @return {Plane} A reference to this plane. */ translate( offset ) { this.constant -= offset.dot( this.normal ); return this; } /** * Returns `true` if this plane is equal with the given one. * * @param {Plane} plane - The plane to test for equality. * @return {boolean} Whether this plane is equal with the given one. */ equals( plane ) { return plane.normal.equals( this.normal ) && ( plane.constant === this.constant ); } /** * Returns a new plane with copied values from this instance. * * @return {Plane} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } const _sphere$3 = /*@__PURE__*/ new Sphere(); const _vector$6 = /*@__PURE__*/ new Vector3(); /** * Frustums are used to determine what is inside the camera's field of view. * They help speed up the rendering process - objects which lie outside a camera's * frustum can safely be excluded from rendering. * * This class is mainly intended for use internally by a renderer. */ class Frustum { /** * Constructs a new frustum. * * @param {Plane} [p0] - The first plane that encloses the frustum. * @param {Plane} [p1] - The second plane that encloses the frustum. * @param {Plane} [p2] - The third plane that encloses the frustum. * @param {Plane} [p3] - The fourth plane that encloses the frustum. * @param {Plane} [p4] - The fifth plane that encloses the frustum. * @param {Plane} [p5] - The sixth plane that encloses the frustum. */ constructor( p0 = new Plane(), p1 = new Plane(), p2 = new Plane(), p3 = new Plane(), p4 = new Plane(), p5 = new Plane() ) { /** * This array holds the planes that enclose the frustum.
* * @type {Array} */ this.planes = [ p0, p1, p2, p3, p4, p5 ]; } /** * Sets the frustum planes by copying the given planes. * * @param {Plane} [p0] - The first plane that encloses the frustum. * @param {Plane} [p1] - The second plane that encloses the frustum. * @param {Plane} [p2] - The third plane that encloses the frustum. * @param {Plane} [p3] - The fourth plane that encloses the frustum. * @param {Plane} [p4] - The fifth plane that encloses the frustum. * @param {Plane} [p5] - The sixth plane that encloses the frustum. * @return {Frustum} A reference to this frustum. */ set( p0, p1, p2, p3, p4, p5 ) { const planes = this.planes; planes[ 0 ].copy( p0 ); planes[ 1 ].copy( p1 ); planes[ 2 ].copy( p2 ); planes[ 3 ].copy( p3 ); planes[ 4 ].copy( p4 ); planes[ 5 ].copy( p5 ); return this; } /** * Copies the values of the given frustum to this instance. * * @param {Frustum} frustum - The frustum to copy. * @return {Frustum} A reference to this frustum. */ copy( frustum ) { const planes = this.planes; for ( let i = 0; i < 6; i ++ ) { planes[ i ].copy( frustum.planes[ i ] ); } return this; } /** * Sets the frustum planes from the given projection matrix. * * @param {Matrix4} m - The projection matrix. * @param {(WebGLCoordinateSystem|WebGPUCoordinateSystem)} coordinateSystem - The coordinate system. * @return {Frustum} A reference to this frustum. */ setFromProjectionMatrix( m, coordinateSystem = WebGLCoordinateSystem ) { const planes = this.planes; const me = m.elements; const me0 = me[ 0 ], me1 = me[ 1 ], me2 = me[ 2 ], me3 = me[ 3 ]; const me4 = me[ 4 ], me5 = me[ 5 ], me6 = me[ 6 ], me7 = me[ 7 ]; const me8 = me[ 8 ], me9 = me[ 9 ], me10 = me[ 10 ], me11 = me[ 11 ]; const me12 = me[ 12 ], me13 = me[ 13 ], me14 = me[ 14 ], me15 = me[ 15 ]; planes[ 0 ].setComponents( me3 - me0, me7 - me4, me11 - me8, me15 - me12 ).normalize(); planes[ 1 ].setComponents( me3 + me0, me7 + me4, me11 + me8, me15 + me12 ).normalize(); planes[ 2 ].setComponents( me3 + me1, me7 + me5, me11 + me9, me15 + me13 ).normalize(); planes[ 3 ].setComponents( me3 - me1, me7 - me5, me11 - me9, me15 - me13 ).normalize(); planes[ 4 ].setComponents( me3 - me2, me7 - me6, me11 - me10, me15 - me14 ).normalize(); if ( coordinateSystem === WebGLCoordinateSystem ) { planes[ 5 ].setComponents( me3 + me2, me7 + me6, me11 + me10, me15 + me14 ).normalize(); } else if ( coordinateSystem === WebGPUCoordinateSystem ) { planes[ 5 ].setComponents( me2, me6, me10, me14 ).normalize(); } else { throw new Error( 'THREE.Frustum.setFromProjectionMatrix(): Invalid coordinate system: ' + coordinateSystem ); } return this; } /** * Returns `true` if the 3D object's bounding sphere is intersecting this frustum. * * Note that the 3D object must have a geometry so that the bounding sphere can be calculated. * * @param {Object3D} object - The 3D object to test. * @return {boolean} Whether the 3D object's bounding sphere is intersecting this frustum or not. */ intersectsObject( object ) { if ( object.boundingSphere !== undefined ) { if ( object.boundingSphere === null ) object.computeBoundingSphere(); _sphere$3.copy( object.boundingSphere ).applyMatrix4( object.matrixWorld ); } else { const geometry = object.geometry; if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere(); _sphere$3.copy( geometry.boundingSphere ).applyMatrix4( object.matrixWorld ); } return this.intersectsSphere( _sphere$3 ); } /** * Returns `true` if the given sprite is intersecting this frustum. * * @param {Sprite} sprite - The sprite to test. 
* @return {boolean} Whether the sprite is intersecting this frustum or not. */ intersectsSprite( sprite ) { _sphere$3.center.set( 0, 0, 0 ); _sphere$3.radius = 0.7071067811865476; _sphere$3.applyMatrix4( sprite.matrixWorld ); return this.intersectsSphere( _sphere$3 ); } /** * Returns `true` if the given bounding sphere is intersecting this frustum. * * @param {Sphere} sphere - The bounding sphere to test. * @return {boolean} Whether the bounding sphere is intersecting this frustum or not. */ intersectsSphere( sphere ) { const planes = this.planes; const center = sphere.center; const negRadius = - sphere.radius; for ( let i = 0; i < 6; i ++ ) { const distance = planes[ i ].distanceToPoint( center ); if ( distance < negRadius ) { return false; } } return true; } /** * Returns `true` if the given bounding box is intersecting this frustum. * * @param {Box3} box - The bounding box to test. * @return {boolean} Whether the bounding box is intersecting this frustum or not. */ intersectsBox( box ) { const planes = this.planes; for ( let i = 0; i < 6; i ++ ) { const plane = planes[ i ]; // corner at max distance _vector$6.x = plane.normal.x > 0 ? box.max.x : box.min.x; _vector$6.y = plane.normal.y > 0 ? box.max.y : box.min.y; _vector$6.z = plane.normal.z > 0 ? box.max.z : box.min.z; if ( plane.distanceToPoint( _vector$6 ) < 0 ) { return false; } } return true; } /** * Returns `true` if the given point lies within the frustum. * * @param {Vector3} point - The point to test. * @return {boolean} Whether the point lies within this frustum or not. */ containsPoint( point ) { const planes = this.planes; for ( let i = 0; i < 6; i ++ ) { if ( planes[ i ].distanceToPoint( point ) < 0 ) { return false; } } return true; } /** * Returns a new frustum with copied values from this instance. * * @return {Frustum} A clone of this instance. 
*/ clone() { return new this.constructor().copy( this ); } } function ascIdSort( a, b ) { return a - b; } function sortOpaque( a, b ) { return a.z - b.z; } function sortTransparent( a, b ) { return b.z - a.z; } class MultiDrawRenderList { constructor() { this.index = 0; this.pool = []; this.list = []; } push( start, count, z, index ) { const pool = this.pool; const list = this.list; if ( this.index >= pool.length ) { pool.push( { start: -1, count: -1, z: -1, index: -1, } ); } const item = pool[ this.index ]; list.push( item ); this.index ++; item.start = start; item.count = count; item.z = z; item.index = index; } reset() { this.list.length = 0; this.index = 0; } } const _matrix$1 = /*@__PURE__*/ new Matrix4(); const _whiteColor = /*@__PURE__*/ new Color( 1, 1, 1 ); const _frustum = /*@__PURE__*/ new Frustum(); const _box$1 = /*@__PURE__*/ new Box3(); const _sphere$2 = /*@__PURE__*/ new Sphere(); const _vector$5 = /*@__PURE__*/ new Vector3(); const _forward = /*@__PURE__*/ new Vector3(); const _temp = /*@__PURE__*/ new Vector3(); const _renderList = /*@__PURE__*/ new MultiDrawRenderList(); const _mesh = /*@__PURE__*/ new Mesh(); const _batchIntersects = []; // copies data from attribute "src" into "target" starting at "targetOffset" function copyAttributeData( src, target, targetOffset = 0 ) { const itemSize = target.itemSize; if ( src.isInterleavedBufferAttribute || src.array.constructor !== target.array.constructor ) { // use the component getters and setters if the array data cannot // be copied directly const vertexCount = src.count; for ( let i = 0; i < vertexCount; i ++ ) { for ( let c = 0; c < itemSize; c ++ ) { target.setComponent( i + targetOffset, c, src.getComponent( i, c ) ); } } } else { // faster copy approach using typed array set function target.array.set( src.array, targetOffset * itemSize ); } target.needsUpdate = true; } // safely copies array contents to a potentially smaller array function copyArrayContents( src, target ) { if ( src.constructor !== target.constructor ) { // if arrays are of a different type (eg due to index size increasing) then data must be per-element copied const len = Math.min( src.length, target.length ); for ( let i = 0; i < len; i ++ ) { target[ i ] = src[ i ]; } } else { // if the arrays use the same data layout we can use a fast block copy const len = Math.min( src.length, target.length ); target.set( new src.constructor( src.buffer, 0, len ) ); } } /** * A special version of a mesh with multi draw batch rendering support. Use * this class if you have to render a large number of objects with the same * material but with different geometries or world transformations. The usage of * `BatchedMesh` will help you to reduce the number of draw calls and thus improve the overall * rendering performance in your application. 
* * ```js * const box = new THREE.BoxGeometry( 1, 1, 1 ); * const sphere = new THREE.SphereGeometry( 1, 12, 12 ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } ); * * // initialize and add geometries into the batched mesh * const batchedMesh = new BatchedMesh( 10, 5000, 10000, material ); * const boxGeometryId = batchedMesh.addGeometry( box ); * const sphereGeometryId = batchedMesh.addGeometry( sphere ); * * // create instances of those geometries * const boxInstancedId1 = batchedMesh.addInstance( boxGeometryId ); * const boxInstancedId2 = batchedMesh.addInstance( boxGeometryId ); * * const sphereInstancedId1 = batchedMesh.addInstance( sphereGeometryId ); * const sphereInstancedId2 = batchedMesh.addInstance( sphereGeometryId ); * * // position the geometries * batchedMesh.setMatrixAt( boxInstancedId1, boxMatrix1 ); * batchedMesh.setMatrixAt( boxInstancedId2, boxMatrix2 ); * * batchedMesh.setMatrixAt( sphereInstancedId1, sphereMatrix1 ); * batchedMesh.setMatrixAt( sphereInstancedId2, sphereMatrix2 ); * * scene.add( batchedMesh ); * ``` * * @augments Mesh */ class BatchedMesh extends Mesh { /** * Constructs a new batched mesh. * * @param {number} maxInstanceCount - The maximum number of individual instances planned to be added and rendered. * @param {number} maxVertexCount - The maximum number of vertices to be used by all unique geometries. * @param {number} [maxIndexCount=maxVertexCount*2] - The maximum number of indices to be used by all unique geometries. * @param {Material|Array} [material] - The mesh material. */ constructor( maxInstanceCount, maxVertexCount, maxIndexCount = maxVertexCount * 2, material ) { super( new BufferGeometry(), material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBatchedMesh = true; /** * When set to `true`, the individual objects of a batch are frustum culled. * * @type {boolean} * @default true */ this.perObjectFrustumCulled = true; /** * When set to `true`, the individual objects of a batch are sorted to improve overdraw-related artifacts. * If the material is marked as "transparent", objects are rendered back to front and if not then they are * rendered front to back. * * @type {boolean} * @default true */ this.sortObjects = true; /** * The bounding box of the batched mesh. Can be computed via {@link BatchedMesh#computeBoundingBox}. * * @type {?Box3} * @default null */ this.boundingBox = null; /** * The bounding sphere of the batched mesh. Can be computed via {@link BatchedMesh#computeBoundingSphere}. * * @type {?Sphere} * @default null */ this.boundingSphere = null; /** * Takes a sort function that is run before render. The function takes a list of instances to * sort and a camera. The objects in the list include a "z" field to perform a depth-ordered * sort with.
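*
* A sketch of a possible callback (assuming `batchedMesh` is this instance; it mirrors the default
* behavior for opaque materials by sorting by ascending `z`):
* ```js
* batchedMesh.customSort = function ( list, camera ) {
* 	list.sort( ( a, b ) => a.z - b.z );
* };
* ```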
* * @type {?Function} * @default null */ this.customSort = null; // stores visible, active, and geometry id per instance and reserved buffer ranges for geometries this._instanceInfo = []; this._geometryInfo = []; // instance, geometry ids that have been set as inactive, and are available to be overwritten this._availableInstanceIds = []; this._availableGeometryIds = []; // used to track where the next point is that geometry should be inserted this._nextIndexStart = 0; this._nextVertexStart = 0; this._geometryCount = 0; // flags this._visibilityChanged = true; this._geometryInitialized = false; // cached user options this._maxInstanceCount = maxInstanceCount; this._maxVertexCount = maxVertexCount; this._maxIndexCount = maxIndexCount; // buffers for multi draw this._multiDrawCounts = new Int32Array( maxInstanceCount ); this._multiDrawStarts = new Int32Array( maxInstanceCount ); this._multiDrawCount = 0; this._multiDrawInstances = null; // Local matrix per geometry by using data texture this._matricesTexture = null; this._indirectTexture = null; this._colorsTexture = null; this._initMatricesTexture(); this._initIndirectTexture(); } /** * The maximum number of individual instances that can be stored in the batch. * * @type {number} * @readonly */ get maxInstanceCount() { return this._maxInstanceCount; } /** * The instance count. * * @type {number} * @readonly */ get instanceCount() { return this._instanceInfo.length - this._availableInstanceIds.length; } /** * The number of unused vertices. * * @type {number} * @readonly */ get unusedVertexCount() { return this._maxVertexCount - this._nextVertexStart; } /** * The number of unused indices. * * @type {number} * @readonly */ get unusedIndexCount() { return this._maxIndexCount - this._nextIndexStart; } _initMatricesTexture() { // layout (1 matrix = 4 pixels) // RGBA RGBA RGBA RGBA (=> column1, column2, column3, column4) // with 8x8 pixel texture max 16 matrices * 4 pixels = (8 * 8) // 16x16 pixel texture max 64 matrices * 4 pixels = (16 * 16) // 32x32 pixel texture max 256 matrices * 4 pixels = (32 * 32) // 64x64 pixel texture max 1024 matrices * 4 pixels = (64 * 64) let size = Math.sqrt( this._maxInstanceCount * 4 ); // 4 pixels needed for 1 matrix size = Math.ceil( size / 4 ) * 4; size = Math.max( size, 4 ); const matricesArray = new Float32Array( size * size * 4 ); // 4 floats per RGBA pixel const matricesTexture = new DataTexture( matricesArray, size, size, RGBAFormat, FloatType ); this._matricesTexture = matricesTexture; } _initIndirectTexture() { let size = Math.sqrt( this._maxInstanceCount ); size = Math.ceil( size ); const indirectArray = new Uint32Array( size * size ); const indirectTexture = new DataTexture( indirectArray, size, size, RedIntegerFormat, UnsignedIntType ); this._indirectTexture = indirectTexture; } _initColorsTexture() { let size = Math.sqrt( this._maxInstanceCount ); size = Math.ceil( size ); // 4 floats per RGBA pixel initialized to white const colorsArray = new Float32Array( size * size * 4 ).fill( 1 ); const colorsTexture = new DataTexture( colorsArray, size, size, RGBAFormat, FloatType ); colorsTexture.colorSpace = ColorManagement.workingColorSpace; this._colorsTexture = colorsTexture; } _initializeGeometry( reference ) { const geometry = this.geometry; const maxVertexCount = this._maxVertexCount; const maxIndexCount = this._maxIndexCount; if ( this._geometryInitialized === false ) { for ( const attributeName in reference.attributes ) { const srcAttribute = reference.getAttribute( attributeName ); const { array, 
itemSize, normalized } = srcAttribute; const dstArray = new array.constructor( maxVertexCount * itemSize ); const dstAttribute = new BufferAttribute( dstArray, itemSize, normalized ); geometry.setAttribute( attributeName, dstAttribute ); } if ( reference.getIndex() !== null ) { // Reserve last u16 index for primitive restart. const indexArray = maxVertexCount > 65535 ? new Uint32Array( maxIndexCount ) : new Uint16Array( maxIndexCount ); geometry.setIndex( new BufferAttribute( indexArray, 1 ) ); } this._geometryInitialized = true; } } // Make sure the geometry is compatible with the existing combined geometry attributes _validateGeometry( geometry ) { // check to ensure the geometries are using consistent attributes and indices const batchGeometry = this.geometry; if ( Boolean( geometry.getIndex() ) !== Boolean( batchGeometry.getIndex() ) ) { throw new Error( 'THREE.BatchedMesh: All geometries must consistently have "index".' ); } for ( const attributeName in batchGeometry.attributes ) { if ( ! geometry.hasAttribute( attributeName ) ) { throw new Error( `THREE.BatchedMesh: Added geometry missing "${ attributeName }". All geometries must have consistent attributes.` ); } const srcAttribute = geometry.getAttribute( attributeName ); const dstAttribute = batchGeometry.getAttribute( attributeName ); if ( srcAttribute.itemSize !== dstAttribute.itemSize || srcAttribute.normalized !== dstAttribute.normalized ) { throw new Error( 'THREE.BatchedMesh: All attributes must have a consistent itemSize and normalized value.' ); } } } /** * Validates the instance defined by the given ID. * * @param {number} instanceId - The instance to validate. */ validateInstanceId( instanceId ) { const instanceInfo = this._instanceInfo; if ( instanceId < 0 || instanceId >= instanceInfo.length || instanceInfo[ instanceId ].active === false ) { throw new Error( `THREE.BatchedMesh: Invalid instanceId ${instanceId}. Instance is either out of range or has been deleted.` ); } } /** * Validates the geometry defined by the given ID. * * @param {number} geometryId - The geometry to validate. */ validateGeometryId( geometryId ) { const geometryInfoList = this._geometryInfo; if ( geometryId < 0 || geometryId >= geometryInfoList.length || geometryInfoList[ geometryId ].active === false ) { throw new Error( `THREE.BatchedMesh: Invalid geometryId ${geometryId}. Geometry is either out of range or has been deleted.` ); } } /** * Takes a sort function that is run before render. The function takes a list of instances to * sort and a camera. The objects in the list include a "z" field to perform a depth-ordered sort with. * * @param {Function} func - The custom sort function. * @return {BatchedMesh} A reference to this batched mesh. */ setCustomSort( func ) { this.customSort = func; return this; } /** * Computes the bounding box, updating {@link BatchedMesh#boundingBox}. * Bounding boxes aren't computed by default. They need to be explicitly computed, * otherwise they are `null`.
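*
* A short usage sketch (assuming `batchedMesh` from the class example above):
* ```js
* // bounds are not computed automatically, so request them explicitly when needed
* batchedMesh.computeBoundingBox();
* console.log( batchedMesh.boundingBox ); // a THREE.Box3 instead of null
* ```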
*/ computeBoundingBox() { if ( this.boundingBox === null ) { this.boundingBox = new Box3(); } const boundingBox = this.boundingBox; const instanceInfo = this._instanceInfo; boundingBox.makeEmpty(); for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( instanceInfo[ i ].active === false ) continue; const geometryId = instanceInfo[ i ].geometryIndex; this.getMatrixAt( i, _matrix$1 ); this.getBoundingBoxAt( geometryId, _box$1 ).applyMatrix4( _matrix$1 ); boundingBox.union( _box$1 ); } } /** * Computes the bounding sphere, updating {@link BatchedMesh#boundingSphere}. * Bounding spheres aren't computed by default. They need to be explicitly computed, * otherwise they are `null`. */ computeBoundingSphere() { if ( this.boundingSphere === null ) { this.boundingSphere = new Sphere(); } const boundingSphere = this.boundingSphere; const instanceInfo = this._instanceInfo; boundingSphere.makeEmpty(); for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( instanceInfo[ i ].active === false ) continue; const geometryId = instanceInfo[ i ].geometryIndex; this.getMatrixAt( i, _matrix$1 ); this.getBoundingSphereAt( geometryId, _sphere$2 ).applyMatrix4( _matrix$1 ); boundingSphere.union( _sphere$2 ); } } /** * Adds a new instance to the batch using the geometry of the given ID and returns * a new id referring to the new instance to be used by other functions. * * @param {number} geometryId - The ID of a previously added geometry via {@link BatchedMesh#addGeometry}. * @return {number} The instance ID. */ addInstance( geometryId ) { const atCapacity = this._instanceInfo.length >= this.maxInstanceCount; // ensure we're not over geometry if ( atCapacity && this._availableInstanceIds.length === 0 ) { throw new Error( 'THREE.BatchedMesh: Maximum item count reached.' ); } const instanceInfo = { visible: true, active: true, geometryIndex: geometryId, }; let drawId = null; // Prioritize using previously freed instance ids if ( this._availableInstanceIds.length > 0 ) { this._availableInstanceIds.sort( ascIdSort ); drawId = this._availableInstanceIds.shift(); this._instanceInfo[ drawId ] = instanceInfo; } else { drawId = this._instanceInfo.length; this._instanceInfo.push( instanceInfo ); } const matricesTexture = this._matricesTexture; _matrix$1.identity().toArray( matricesTexture.image.data, drawId * 16 ); matricesTexture.needsUpdate = true; const colorsTexture = this._colorsTexture; if ( colorsTexture ) { _whiteColor.toArray( colorsTexture.image.data, drawId * 4 ); colorsTexture.needsUpdate = true; } this._visibilityChanged = true; return drawId; } /** * Adds the given geometry to the batch and returns the associated * geometry id referring to it to be used in other functions. * * @param {BufferGeometry} geometry - The geometry to add. * @param {number} [reservedVertexCount=-1] - Optional parameter specifying the amount of * vertex buffer space to reserve for the added geometry. This is necessary if it is planned * to set a new geometry at this index at a later time that is larger than the original geometry. * Defaults to the length of the given geometry vertex buffer. * @param {number} [reservedIndexCount=-1] - Optional parameter specifying the amount of index * buffer space to reserve for the added geometry. This is necessary if it is planned to set a * new geometry at this index at a later time that is larger than the original geometry. Defaults to * the length of the given geometry index buffer. * @return {number} The geometry ID. 
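*
* A hedged sketch of how the reserve parameters are typically used (the geometry
* variables are placeholders, not part of the API):
* ```js
* // reserve room for up to 1024 vertices and 3072 indices so that a larger
* // geometry can be swapped in later via setGeometryAt()
* const geometryId = batchedMesh.addGeometry( lowDetailGeometry, 1024, 3072 );
*
* // later: replace the stored geometry in place; every instance that references
* // this geometry id will render the new data
* batchedMesh.setGeometryAt( geometryId, highDetailGeometry );
* ```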
*/ addGeometry( geometry, reservedVertexCount = -1, reservedIndexCount = -1 ) { this._initializeGeometry( geometry ); this._validateGeometry( geometry ); const geometryInfo = { // geometry information vertexStart: -1, vertexCount: -1, reservedVertexCount: -1, indexStart: -1, indexCount: -1, reservedIndexCount: -1, // draw range information start: -1, count: -1, // state boundingBox: null, boundingSphere: null, active: true, }; const geometryInfoList = this._geometryInfo; geometryInfo.vertexStart = this._nextVertexStart; geometryInfo.reservedVertexCount = reservedVertexCount === -1 ? geometry.getAttribute( 'position' ).count : reservedVertexCount; const index = geometry.getIndex(); const hasIndex = index !== null; if ( hasIndex ) { geometryInfo.indexStart = this._nextIndexStart; geometryInfo.reservedIndexCount = reservedIndexCount === -1 ? index.count : reservedIndexCount; } if ( geometryInfo.indexStart !== -1 && geometryInfo.indexStart + geometryInfo.reservedIndexCount > this._maxIndexCount || geometryInfo.vertexStart + geometryInfo.reservedVertexCount > this._maxVertexCount ) { throw new Error( 'THREE.BatchedMesh: Reserved space request exceeds the maximum buffer size.' ); } // update id let geometryId; if ( this._availableGeometryIds.length > 0 ) { this._availableGeometryIds.sort( ascIdSort ); geometryId = this._availableGeometryIds.shift(); geometryInfoList[ geometryId ] = geometryInfo; } else { geometryId = this._geometryCount; this._geometryCount ++; geometryInfoList.push( geometryInfo ); } // update the geometry this.setGeometryAt( geometryId, geometry ); // increment the next geometry position this._nextIndexStart = geometryInfo.indexStart + geometryInfo.reservedIndexCount; this._nextVertexStart = geometryInfo.vertexStart + geometryInfo.reservedVertexCount; return geometryId; } /** * Replaces the geometry at the given ID with the provided geometry. Throws an error if there * is not enough space reserved for geometry. Calling this will change all instances that are * rendering that geometry. * * @param {number} geometryId - The ID of the geometry that should be replaced with the given geometry. * @param {BufferGeometry} geometry - The new geometry. * @return {number} The geometry ID. */ setGeometryAt( geometryId, geometry ) { if ( geometryId >= this._geometryCount ) { throw new Error( 'THREE.BatchedMesh: Maximum geometry count reached.' ); } this._validateGeometry( geometry ); const batchGeometry = this.geometry; const hasIndex = batchGeometry.getIndex() !== null; const dstIndex = batchGeometry.getIndex(); const srcIndex = geometry.getIndex(); const geometryInfo = this._geometryInfo[ geometryId ]; if ( hasIndex && srcIndex.count > geometryInfo.reservedIndexCount || geometry.attributes.position.count > geometryInfo.reservedVertexCount ) { throw new Error( 'THREE.BatchedMesh: Reserved space not large enough for provided geometry.' 
); } // copy geometry buffer data over const vertexStart = geometryInfo.vertexStart; const reservedVertexCount = geometryInfo.reservedVertexCount; geometryInfo.vertexCount = geometry.getAttribute( 'position' ).count; for ( const attributeName in batchGeometry.attributes ) { // copy attribute data const srcAttribute = geometry.getAttribute( attributeName ); const dstAttribute = batchGeometry.getAttribute( attributeName ); copyAttributeData( srcAttribute, dstAttribute, vertexStart ); // fill the rest in with zeroes const itemSize = srcAttribute.itemSize; for ( let i = srcAttribute.count, l = reservedVertexCount; i < l; i ++ ) { const index = vertexStart + i; for ( let c = 0; c < itemSize; c ++ ) { dstAttribute.setComponent( index, c, 0 ); } } dstAttribute.needsUpdate = true; dstAttribute.addUpdateRange( vertexStart * itemSize, reservedVertexCount * itemSize ); } // copy index if ( hasIndex ) { const indexStart = geometryInfo.indexStart; const reservedIndexCount = geometryInfo.reservedIndexCount; geometryInfo.indexCount = geometry.getIndex().count; // copy index data over for ( let i = 0; i < srcIndex.count; i ++ ) { dstIndex.setX( indexStart + i, vertexStart + srcIndex.getX( i ) ); } // fill the rest in with zeroes for ( let i = srcIndex.count, l = reservedIndexCount; i < l; i ++ ) { dstIndex.setX( indexStart + i, vertexStart ); } dstIndex.needsUpdate = true; dstIndex.addUpdateRange( indexStart, geometryInfo.reservedIndexCount ); } // update the draw range geometryInfo.start = hasIndex ? geometryInfo.indexStart : geometryInfo.vertexStart; geometryInfo.count = hasIndex ? geometryInfo.indexCount : geometryInfo.vertexCount; // store the bounding boxes geometryInfo.boundingBox = null; if ( geometry.boundingBox !== null ) { geometryInfo.boundingBox = geometry.boundingBox.clone(); } geometryInfo.boundingSphere = null; if ( geometry.boundingSphere !== null ) { geometryInfo.boundingSphere = geometry.boundingSphere.clone(); } this._visibilityChanged = true; return geometryId; } /** * Deletes the geometry defined by the given ID from this batch. Any instances referencing * this geometry will also be removed as a side effect. * * @param {number} geometryId - The ID of the geometry to remove from the batch. * @return {BatchedMesh} A reference to this batched mesh. */ deleteGeometry( geometryId ) { const geometryInfoList = this._geometryInfo; if ( geometryId >= geometryInfoList.length || geometryInfoList[ geometryId ].active === false ) { return this; } // delete any instances associated with this geometry const instanceInfo = this._instanceInfo; for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( instanceInfo[ i ].active && instanceInfo[ i ].geometryIndex === geometryId ) { this.deleteInstance( i ); } } geometryInfoList[ geometryId ].active = false; this._availableGeometryIds.push( geometryId ); this._visibilityChanged = true; return this; } /** * Deletes an existing instance from the batch using the given ID. * * @param {number} instanceId - The ID of the instance to remove from the batch. * @return {BatchedMesh} A reference to this batched mesh. */ deleteInstance( instanceId ) { this.validateInstanceId( instanceId ); this._instanceInfo[ instanceId ].active = false; this._availableInstanceIds.push( instanceId ); this._visibilityChanged = true; return this; } /** * Repacks the sub geometries of this batched mesh to remove any unused space remaining from * previously deleted geometry, freeing up space to add new geometry.
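*
* A small sketch of the intended workflow (illustrative only):
* ```js
* // deactivate a geometry slot (and any instances that reference it) …
* batchedMesh.deleteGeometry( geometryId );
*
* // … then repack the remaining data so the freed space can be reused
* batchedMesh.optimize();
* ```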
* @return {BatchedMesh} A reference to this batched mesh. */ optimize() { // track the next indices to copy data to let nextVertexStart = 0; let nextIndexStart = 0; // Iterate over all geometry ranges in order sorted from earliest in the geometry buffer to latest // in the geometry buffer. Because draw range objects can be reused there is no guarantee of their order. const geometryInfoList = this._geometryInfo; const indices = geometryInfoList .map( ( e, i ) => i ) .sort( ( a, b ) => { return geometryInfoList[ a ].vertexStart - geometryInfoList[ b ].vertexStart; } ); const geometry = this.geometry; for ( let i = 0, l = geometryInfoList.length; i < l; i ++ ) { // if a geometry range is inactive then don't copy anything const index = indices[ i ]; const geometryInfo = geometryInfoList[ index ]; if ( geometryInfo.active === false ) { continue; } // if a geometry contains an index buffer then shift it, as well if ( geometry.index !== null ) { if ( geometryInfo.indexStart !== nextIndexStart ) { const { indexStart, vertexStart, reservedIndexCount } = geometryInfo; const index = geometry.index; const array = index.array; // shift the index pointers based on how the vertex data will shift // adjusting the index must happen first so the original vertex start value is available const elementDelta = nextVertexStart - vertexStart; for ( let j = indexStart; j < indexStart + reservedIndexCount; j ++ ) { array[ j ] = array[ j ] + elementDelta; } index.array.copyWithin( nextIndexStart, indexStart, indexStart + reservedIndexCount ); index.addUpdateRange( nextIndexStart, reservedIndexCount ); geometryInfo.indexStart = nextIndexStart; } nextIndexStart += geometryInfo.reservedIndexCount; } // if a geometry needs to be moved then copy attribute data to overwrite unused space if ( geometryInfo.vertexStart !== nextVertexStart ) { const { vertexStart, reservedVertexCount } = geometryInfo; const attributes = geometry.attributes; for ( const key in attributes ) { const attribute = attributes[ key ]; const { array, itemSize } = attribute; array.copyWithin( nextVertexStart * itemSize, vertexStart * itemSize, ( vertexStart + reservedVertexCount ) * itemSize ); attribute.addUpdateRange( nextVertexStart * itemSize, reservedVertexCount * itemSize ); } geometryInfo.vertexStart = nextVertexStart; } nextVertexStart += geometryInfo.reservedVertexCount; geometryInfo.start = geometry.index ? geometryInfo.indexStart : geometryInfo.vertexStart; // step the next geometry points to the shifted position this._nextIndexStart = geometry.index ? geometryInfo.indexStart + geometryInfo.reservedIndexCount : 0; this._nextVertexStart = geometryInfo.vertexStart + geometryInfo.reservedVertexCount; } return this; } /** * Returns the bounding box for the given geometry. * * @param {number} geometryId - The ID of the geometry to return the bounding box for. * @param {Box3} target - The target object that is used to store the method's result. * @return {Box3|null} The geometry's bounding box. Returns `null` if no geometry has been found for the given ID. 
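*
* A brief sketch (reusing a target object, the usual three.js pattern):
* ```js
* const box = new THREE.Box3();
* batchedMesh.getBoundingBoxAt( geometryId, box );
* ```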
*/ getBoundingBoxAt( geometryId, target ) { if ( geometryId >= this._geometryCount ) { return null; } // compute bounding box const geometry = this.geometry; const geometryInfo = this._geometryInfo[ geometryId ]; if ( geometryInfo.boundingBox === null ) { const box = new Box3(); const index = geometry.index; const position = geometry.attributes.position; for ( let i = geometryInfo.start, l = geometryInfo.start + geometryInfo.count; i < l; i ++ ) { let iv = i; if ( index ) { iv = index.getX( iv ); } box.expandByPoint( _vector$5.fromBufferAttribute( position, iv ) ); } geometryInfo.boundingBox = box; } target.copy( geometryInfo.boundingBox ); return target; } /** * Returns the bounding sphere for the given geometry. * * @param {number} geometryId - The ID of the geometry to return the bounding sphere for. * @param {Sphere} target - The target object that is used to store the method's result. * @return {Sphere|null} The geometry's bounding sphere. Returns `null` if no geometry has been found for the given ID. */ getBoundingSphereAt( geometryId, target ) { if ( geometryId >= this._geometryCount ) { return null; } // compute bounding sphere const geometry = this.geometry; const geometryInfo = this._geometryInfo[ geometryId ]; if ( geometryInfo.boundingSphere === null ) { const sphere = new Sphere(); this.getBoundingBoxAt( geometryId, _box$1 ); _box$1.getCenter( sphere.center ); const index = geometry.index; const position = geometry.attributes.position; let maxRadiusSq = 0; for ( let i = geometryInfo.start, l = geometryInfo.start + geometryInfo.count; i < l; i ++ ) { let iv = i; if ( index ) { iv = index.getX( iv ); } _vector$5.fromBufferAttribute( position, iv ); maxRadiusSq = Math.max( maxRadiusSq, sphere.center.distanceToSquared( _vector$5 ) ); } sphere.radius = Math.sqrt( maxRadiusSq ); geometryInfo.boundingSphere = sphere; } target.copy( geometryInfo.boundingSphere ); return target; } /** * Sets the given local transformation matrix to the defined instance. * Negatively scaled matrices are not supported. * * @param {number} instanceId - The ID of an instance to set the matrix of. * @param {Matrix4} matrix - A 4x4 matrix representing the local transformation of a single instance. * @return {BatchedMesh} A reference to this batched mesh. */ setMatrixAt( instanceId, matrix ) { this.validateInstanceId( instanceId ); const matricesTexture = this._matricesTexture; const matricesArray = this._matricesTexture.image.data; matrix.toArray( matricesArray, instanceId * 16 ); matricesTexture.needsUpdate = true; return this; } /** * Returns the local transformation matrix of the defined instance. * * @param {number} instanceId - The ID of an instance to get the matrix of. * @param {Matrix4} matrix - The target object that is used to store the method's result. * @return {Matrix4} The instance's local transformation matrix. */ getMatrixAt( instanceId, matrix ) { this.validateInstanceId( instanceId ); return matrix.fromArray( this._matricesTexture.image.data, instanceId * 16 ); } /** * Sets the given color to the defined instance. * * @param {number} instanceId - The ID of an instance to set the color of. * @param {Color} color - The color to set the instance to. * @return {BatchedMesh} A reference to this batched mesh. 
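*
* A brief sketch (assuming `instanceId` was returned by addInstance()):
* ```js
* // the internal colors texture is created lazily on first use
* batchedMesh.setColorAt( instanceId, new THREE.Color( 0xff0000 ) );
* ```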
*/ setColorAt( instanceId, color ) { this.validateInstanceId( instanceId ); if ( this._colorsTexture === null ) { this._initColorsTexture(); } color.toArray( this._colorsTexture.image.data, instanceId * 4 ); this._colorsTexture.needsUpdate = true; return this; } /** * Returns the color of the defined instance. * * @param {number} instanceId - The ID of an instance to get the color of. * @param {Color} color - The target object that is used to store the method's result. * @return {Color} The instance's color. */ getColorAt( instanceId, color ) { this.validateInstanceId( instanceId ); return color.fromArray( this._colorsTexture.image.data, instanceId * 4 ); } /** * Sets the visibility of the instance. * * @param {number} instanceId - The ID of the instance to set the visibility of. * @param {boolean} visible - Whether the instance is visible or not. * @return {BatchedMesh} A reference to this batched mesh. */ setVisibleAt( instanceId, visible ) { this.validateInstanceId( instanceId ); if ( this._instanceInfo[ instanceId ].visible === visible ) { return this; } this._instanceInfo[ instanceId ].visible = visible; this._visibilityChanged = true; return this; } /** * Returns the visibility state of the defined instance. * * @param {number} instanceId - The ID of an instance to get the visibility state of. * @return {boolean} Whether the instance is visible or not. */ getVisibleAt( instanceId ) { this.validateInstanceId( instanceId ); return this._instanceInfo[ instanceId ].visible; } /** * Sets the geometry ID of the instance at the given index. * * @param {number} instanceId - The ID of the instance to set the geometry ID of. * @param {number} geometryId - The geometry ID to be used by the instance. * @return {BatchedMesh} A reference to this batched mesh. */ setGeometryIdAt( instanceId, geometryId ) { this.validateInstanceId( instanceId ); this.validateGeometryId( geometryId ); this._instanceInfo[ instanceId ].geometryIndex = geometryId; return this; } /** * Returns the geometry ID of the defined instance. * * @param {number} instanceId - The ID of an instance to get the geometry ID of. * @return {number} The instance's geometry ID. */ getGeometryIdAt( instanceId ) { this.validateInstanceId( instanceId ); return this._instanceInfo[ instanceId ].geometryIndex; } /** * Gets the range representing the subset of triangles related to the attached geometry, * indicating the starting offset and count, or `null` if invalid. * * @param {number} geometryId - The ID of the geometry to get the range of. * @param {Object} [target] - The target object that is used to store the method's result. * @return {{ * vertexStart:number,vertexCount:number,reservedVertexCount:number, * indexStart:number,indexCount:number,reservedIndexCount:number, * start:number,count:number * }} The result object with range data. */ getGeometryRangeAt( geometryId, target = {} ) { this.validateGeometryId( geometryId ); const geometryInfo = this._geometryInfo[ geometryId ]; target.vertexStart = geometryInfo.vertexStart; target.vertexCount = geometryInfo.vertexCount; target.reservedVertexCount = geometryInfo.reservedVertexCount; target.indexStart = geometryInfo.indexStart; target.indexCount = geometryInfo.indexCount; target.reservedIndexCount = geometryInfo.reservedIndexCount; target.start = geometryInfo.start; target.count = geometryInfo.count; return target; } /** * Resizes the necessary buffers to support the provided number of instances.
* If the provided arguments shrink the number of instances but there are not enough * unused IDs at the end of the list, then an error is thrown. * * @param {number} maxInstanceCount - The max number of individual instances that can be added and rendered by the batch. */ setInstanceCount( maxInstanceCount ) { // shrink the available instances as much as possible const availableInstanceIds = this._availableInstanceIds; const instanceInfo = this._instanceInfo; availableInstanceIds.sort( ascIdSort ); while ( availableInstanceIds[ availableInstanceIds.length - 1 ] === instanceInfo.length ) { instanceInfo.pop(); availableInstanceIds.pop(); } // throw an error if it can't be shrunk to the desired size if ( maxInstanceCount < instanceInfo.length ) { throw new Error( `BatchedMesh: Instance ids outside the range ${ maxInstanceCount } are being used. Cannot shrink instance count.` ); } // copy the multi draw counts const multiDrawCounts = new Int32Array( maxInstanceCount ); const multiDrawStarts = new Int32Array( maxInstanceCount ); copyArrayContents( this._multiDrawCounts, multiDrawCounts ); copyArrayContents( this._multiDrawStarts, multiDrawStarts ); this._multiDrawCounts = multiDrawCounts; this._multiDrawStarts = multiDrawStarts; this._maxInstanceCount = maxInstanceCount; // update texture data for instance sampling const indirectTexture = this._indirectTexture; const matricesTexture = this._matricesTexture; const colorsTexture = this._colorsTexture; indirectTexture.dispose(); this._initIndirectTexture(); copyArrayContents( indirectTexture.image.data, this._indirectTexture.image.data ); matricesTexture.dispose(); this._initMatricesTexture(); copyArrayContents( matricesTexture.image.data, this._matricesTexture.image.data ); if ( colorsTexture ) { colorsTexture.dispose(); this._initColorsTexture(); copyArrayContents( colorsTexture.image.data, this._colorsTexture.image.data ); } } /** * Resizes the available space in the batch's vertex and index buffer attributes to the provided sizes. * If the provided arguments shrink the geometry buffers but there is not enough unused space at the * end of the geometry attributes then an error is thrown. * * @param {number} maxVertexCount - The maximum number of vertices to be used by all unique geometries to resize to. * @param {number} maxIndexCount - The maximum number of indices to be used by all unique geometries to resize to. */ setGeometrySize( maxVertexCount, maxIndexCount ) { // Check if we can shrink to the requested vertex attribute size const validRanges = [ ...this._geometryInfo ].filter( info => info.active ); const requiredVertexLength = Math.max( ...validRanges.map( range => range.vertexStart + range.reservedVertexCount ) ); if ( requiredVertexLength > maxVertexCount ) { throw new Error( `BatchedMesh: Geometry vertex values are being used outside the range ${ maxVertexCount }. Cannot shrink further.` ); } // Check if we can shrink to the requested index attribute size if ( this.geometry.index ) { const requiredIndexLength = Math.max( ...validRanges.map( range => range.indexStart + range.reservedIndexCount ) ); if ( requiredIndexLength > maxIndexCount ) { throw new Error( `BatchedMesh: Geometry index values are being used outside the range ${ maxIndexCount }.
Cannot shrink further.` ); } } // // dispose of the previous geometry const oldGeometry = this.geometry; oldGeometry.dispose(); // recreate the geometry needed based on the previous variant this._maxVertexCount = maxVertexCount; this._maxIndexCount = maxIndexCount; if ( this._geometryInitialized ) { this._geometryInitialized = false; this.geometry = new BufferGeometry(); this._initializeGeometry( oldGeometry ); } // copy data from the previous geometry const geometry = this.geometry; if ( oldGeometry.index ) { copyArrayContents( oldGeometry.index.array, geometry.index.array ); } for ( const key in oldGeometry.attributes ) { copyArrayContents( oldGeometry.attributes[ key ].array, geometry.attributes[ key ].array ); } } raycast( raycaster, intersects ) { const instanceInfo = this._instanceInfo; const geometryInfoList = this._geometryInfo; const matrixWorld = this.matrixWorld; const batchGeometry = this.geometry; // iterate over each geometry _mesh.material = this.material; _mesh.geometry.index = batchGeometry.index; _mesh.geometry.attributes = batchGeometry.attributes; if ( _mesh.geometry.boundingBox === null ) { _mesh.geometry.boundingBox = new Box3(); } if ( _mesh.geometry.boundingSphere === null ) { _mesh.geometry.boundingSphere = new Sphere(); } for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( ! instanceInfo[ i ].visible || ! instanceInfo[ i ].active ) { continue; } const geometryId = instanceInfo[ i ].geometryIndex; const geometryInfo = geometryInfoList[ geometryId ]; _mesh.geometry.setDrawRange( geometryInfo.start, geometryInfo.count ); // get the intersects this.getMatrixAt( i, _mesh.matrixWorld ).premultiply( matrixWorld ); this.getBoundingBoxAt( geometryId, _mesh.geometry.boundingBox ); this.getBoundingSphereAt( geometryId, _mesh.geometry.boundingSphere ); _mesh.raycast( raycaster, _batchIntersects ); // add batch id to the intersects for ( let j = 0, l = _batchIntersects.length; j < l; j ++ ) { const intersect = _batchIntersects[ j ]; intersect.object = this; intersect.batchId = i; intersects.push( intersect ); } _batchIntersects.length = 0; } _mesh.material = null; _mesh.geometry.index = null; _mesh.geometry.attributes = {}; _mesh.geometry.setDrawRange( 0, Infinity ); } copy( source ) { super.copy( source ); this.geometry = source.geometry.clone(); this.perObjectFrustumCulled = source.perObjectFrustumCulled; this.sortObjects = source.sortObjects; this.boundingBox = source.boundingBox !== null ? source.boundingBox.clone() : null; this.boundingSphere = source.boundingSphere !== null ? source.boundingSphere.clone() : null; this._geometryInfo = source._geometryInfo.map( info => ( { ...info, boundingBox: info.boundingBox !== null ? info.boundingBox.clone() : null, boundingSphere: info.boundingSphere !== null ? 
info.boundingSphere.clone() : null, } ) ); this._instanceInfo = source._instanceInfo.map( info => ( { ...info } ) ); this._maxInstanceCount = source._maxInstanceCount; this._maxVertexCount = source._maxVertexCount; this._maxIndexCount = source._maxIndexCount; this._geometryInitialized = source._geometryInitialized; this._geometryCount = source._geometryCount; this._multiDrawCounts = source._multiDrawCounts.slice(); this._multiDrawStarts = source._multiDrawStarts.slice(); this._matricesTexture = source._matricesTexture.clone(); this._matricesTexture.image.data = this._matricesTexture.image.data.slice(); if ( this._colorsTexture !== null ) { this._colorsTexture = source._colorsTexture.clone(); this._colorsTexture.image.data = this._colorsTexture.image.data.slice(); } return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { // Assuming the geometry is not shared with other meshes this.geometry.dispose(); this._matricesTexture.dispose(); this._matricesTexture = null; this._indirectTexture.dispose(); this._indirectTexture = null; if ( this._colorsTexture !== null ) { this._colorsTexture.dispose(); this._colorsTexture = null; } } onBeforeRender( renderer, scene, camera, geometry, material/*, _group*/ ) { // if visibility has not changed and frustum culling and object sorting is not required // then skip iterating over all items if ( ! this._visibilityChanged && ! this.perObjectFrustumCulled && ! this.sortObjects ) { return; } // the indexed version of the multi draw function requires specifying the start // offset in bytes. const index = geometry.getIndex(); const bytesPerElement = index === null ? 1 : index.array.BYTES_PER_ELEMENT; const instanceInfo = this._instanceInfo; const multiDrawStarts = this._multiDrawStarts; const multiDrawCounts = this._multiDrawCounts; const geometryInfoList = this._geometryInfo; const perObjectFrustumCulled = this.perObjectFrustumCulled; const indirectTexture = this._indirectTexture; const indirectArray = indirectTexture.image.data; // prepare the frustum in the local frame if ( perObjectFrustumCulled ) { _matrix$1 .multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse ) .multiply( this.matrixWorld ); _frustum.setFromProjectionMatrix( _matrix$1, renderer.coordinateSystem ); } let multiDrawCount = 0; if ( this.sortObjects ) { // get the camera position in the local frame _matrix$1.copy( this.matrixWorld ).invert(); _vector$5.setFromMatrixPosition( camera.matrixWorld ).applyMatrix4( _matrix$1 ); _forward.set( 0, 0, -1 ).transformDirection( camera.matrixWorld ).transformDirection( _matrix$1 ); for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( instanceInfo[ i ].visible && instanceInfo[ i ].active ) { const geometryId = instanceInfo[ i ].geometryIndex; // get the bounds in world space this.getMatrixAt( i, _matrix$1 ); this.getBoundingSphereAt( geometryId, _sphere$2 ).applyMatrix4( _matrix$1 ); // determine whether the batched geometry is within the frustum let culled = false; if ( perObjectFrustumCulled ) { culled = ! _frustum.intersectsSphere( _sphere$2 ); } if ( ! 
culled ) { // get the distance from camera used for sorting const geometryInfo = geometryInfoList[ geometryId ]; const z = _temp.subVectors( _sphere$2.center, _vector$5 ).dot( _forward ); _renderList.push( geometryInfo.start, geometryInfo.count, z, i ); } } } // Sort the draw ranges and prep for rendering const list = _renderList.list; const customSort = this.customSort; if ( customSort === null ) { list.sort( material.transparent ? sortTransparent : sortOpaque ); } else { customSort.call( this, list, camera ); } for ( let i = 0, l = list.length; i < l; i ++ ) { const item = list[ i ]; multiDrawStarts[ multiDrawCount ] = item.start * bytesPerElement; multiDrawCounts[ multiDrawCount ] = item.count; indirectArray[ multiDrawCount ] = item.index; multiDrawCount ++; } _renderList.reset(); } else { for ( let i = 0, l = instanceInfo.length; i < l; i ++ ) { if ( instanceInfo[ i ].visible && instanceInfo[ i ].active ) { const geometryId = instanceInfo[ i ].geometryIndex; // determine whether the batched geometry is within the frustum let culled = false; if ( perObjectFrustumCulled ) { // get the bounds in world space this.getMatrixAt( i, _matrix$1 ); this.getBoundingSphereAt( geometryId, _sphere$2 ).applyMatrix4( _matrix$1 ); culled = ! _frustum.intersectsSphere( _sphere$2 ); } if ( ! culled ) { const geometryInfo = geometryInfoList[ geometryId ]; multiDrawStarts[ multiDrawCount ] = geometryInfo.start * bytesPerElement; multiDrawCounts[ multiDrawCount ] = geometryInfo.count; indirectArray[ multiDrawCount ] = i; multiDrawCount ++; } } } } indirectTexture.needsUpdate = true; this._multiDrawCount = multiDrawCount; this._visibilityChanged = false; } onBeforeShadow( renderer, object, camera, shadowCamera, geometry, depthMaterial/* , group */ ) { this.onBeforeRender( renderer, null, shadowCamera, geometry, depthMaterial ); } } /** * A material for rendering line primitives. * * Materials define the appearance of renderable 3D objects. * * ```js * const material = new THREE.LineBasicMaterial( { color: 0xffffff } ); * ``` * * @augments Material */ class LineBasicMaterial extends Material { /** * Constructs a new line basic material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineBasicMaterial = true; this.type = 'LineBasicMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); /** * Sets the color of the lines using data from a texture. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * Controls line thickness or lines. * * Can only be used with {@link SVGRenderer}. WebGL and WebGPU * ignore this setting and always render line primitives with a * width of one pixel. * * @type {number} * @default 1 */ this.linewidth = 1; /** * Defines appearance of line ends. * * Can only be used with {@link SVGRenderer}. * * @type {('butt'|'round'|'square')} * @default 'round' */ this.linecap = 'round'; /** * Defines appearance of line joints. * * Can only be used with {@link SVGRenderer}. 
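*
* A short sketch of the SVGRenderer-only line styling options (WebGL and WebGPU
* always draw lines one pixel wide):
* ```js
* const material = new THREE.LineBasicMaterial( {
*     color: 0x0000ff,
*     linewidth: 2,
*     linecap: 'square',
*     linejoin: 'bevel'
* } );
* ```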
* * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.linejoin = 'round'; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.linewidth = source.linewidth; this.linecap = source.linecap; this.linejoin = source.linejoin; this.fog = source.fog; return this; } } const _vStart = /*@__PURE__*/ new Vector3(); const _vEnd = /*@__PURE__*/ new Vector3(); const _inverseMatrix$1 = /*@__PURE__*/ new Matrix4(); const _ray$1 = /*@__PURE__*/ new Ray(); const _sphere$1 = /*@__PURE__*/ new Sphere(); const _intersectPointOnRay = /*@__PURE__*/ new Vector3(); const _intersectPointOnSegment = /*@__PURE__*/ new Vector3(); /** * A continuous line. The line is rendered by connecting consecutive * vertices with straight lines. * * ```js * const material = new THREE.LineBasicMaterial( { color: 0x0000ff } ); * * const points = []; * points.push( new THREE.Vector3( - 10, 0, 0 ) ); * points.push( new THREE.Vector3( 0, 10, 0 ) ); * points.push( new THREE.Vector3( 10, 0, 0 ) ); * * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const line = new THREE.Line( geometry, material ); * scene.add( line ); * ``` * * @augments Object3D */ class Line extends Object3D { /** * Constructs a new line. * * @param {BufferGeometry} [geometry] - The line geometry. * @param {Material|Array} [material] - The line material. */ constructor( geometry = new BufferGeometry(), material = new LineBasicMaterial() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLine = true; this.type = 'Line'; /** * The line geometry. * * @type {BufferGeometry} */ this.geometry = geometry; /** * The line material. * * @type {Material|Array} * @default LineBasicMaterial */ this.material = material; /** * A dictionary representing the morph targets in the geometry. The key is the * morph target's name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. * * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; /** * An array of weights typically in the range `[0,1]` that specify how much of the morph * is applied. This member is `undefined` by default and only set when morph targets are * detected in the geometry. * * @type {Array|undefined} * @default undefined */ this.morphTargetInfluences = undefined; this.updateMorphTargets(); } copy( source, recursive ) { super.copy( source, recursive ); this.material = Array.isArray( source.material ) ? source.material.slice() : source.material; this.geometry = source.geometry; return this; } /** * Computes an array of distance values which are necessary for rendering dashed lines. * For each vertex in the geometry, the method calculates the cumulative length from the * current point to the very beginning of the line. * * @return {Line} A reference to this line.
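*
* A typical usage sketch (THREE.LineDashedMaterial consumes the generated
* `lineDistance` attribute):
* ```js
* const line = new THREE.Line( geometry, new THREE.LineDashedMaterial( { dashSize: 1, gapSize: 0.5 } ) );
* line.computeLineDistances();
* scene.add( line );
* ```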
*/ computeLineDistances() { const geometry = this.geometry; // we assume non-indexed geometry if ( geometry.index === null ) { const positionAttribute = geometry.attributes.position; const lineDistances = [ 0 ]; for ( let i = 1, l = positionAttribute.count; i < l; i ++ ) { _vStart.fromBufferAttribute( positionAttribute, i - 1 ); _vEnd.fromBufferAttribute( positionAttribute, i ); lineDistances[ i ] = lineDistances[ i - 1 ]; lineDistances[ i ] += _vStart.distanceTo( _vEnd ); } geometry.setAttribute( 'lineDistance', new Float32BufferAttribute( lineDistances, 1 ) ); } else { console.warn( 'THREE.Line.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); } return this; } /** * Computes intersection points between a casted ray and this line. * * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - The target array that holds the intersection points. */ raycast( raycaster, intersects ) { const geometry = this.geometry; const matrixWorld = this.matrixWorld; const threshold = raycaster.params.Line.threshold; const drawRange = geometry.drawRange; // Checking boundingSphere distance to ray if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere(); _sphere$1.copy( geometry.boundingSphere ); _sphere$1.applyMatrix4( matrixWorld ); _sphere$1.radius += threshold; if ( raycaster.ray.intersectsSphere( _sphere$1 ) === false ) return; // _inverseMatrix$1.copy( matrixWorld ).invert(); _ray$1.copy( raycaster.ray ).applyMatrix4( _inverseMatrix$1 ); const localThreshold = threshold / ( ( this.scale.x + this.scale.y + this.scale.z ) / 3 ); const localThresholdSq = localThreshold * localThreshold; const step = this.isLineSegments ? 2 : 1; const index = geometry.index; const attributes = geometry.attributes; const positionAttribute = attributes.position; if ( index !== null ) { const start = Math.max( 0, drawRange.start ); const end = Math.min( index.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, l = end - 1; i < l; i += step ) { const a = index.getX( i ); const b = index.getX( i + 1 ); const intersect = checkIntersection( this, raycaster, _ray$1, localThresholdSq, a, b, i ); if ( intersect ) { intersects.push( intersect ); } } if ( this.isLineLoop ) { const a = index.getX( end - 1 ); const b = index.getX( start ); const intersect = checkIntersection( this, raycaster, _ray$1, localThresholdSq, a, b, end - 1 ); if ( intersect ) { intersects.push( intersect ); } } } else { const start = Math.max( 0, drawRange.start ); const end = Math.min( positionAttribute.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, l = end - 1; i < l; i += step ) { const intersect = checkIntersection( this, raycaster, _ray$1, localThresholdSq, i, i + 1, i ); if ( intersect ) { intersects.push( intersect ); } } if ( this.isLineLoop ) { const intersect = checkIntersection( this, raycaster, _ray$1, localThresholdSq, end - 1, start, end - 1 ); if ( intersect ) { intersects.push( intersect ); } } } } /** * Sets the values of {@link Line#morphTargetDictionary} and {@link Line#morphTargetInfluences} * to make sure existing morph targets can influence this 3D object. 
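*
* A small sketch of how the resulting members are typically used afterwards
* (the morph target name 'wave' is a hypothetical example):
* ```js
* line.updateMorphTargets();
*
* const index = line.morphTargetDictionary[ 'wave' ];
* if ( index !== undefined ) line.morphTargetInfluences[ index ] = 0.5;
* ```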
*/ updateMorphTargets() { const geometry = this.geometry; const morphAttributes = geometry.morphAttributes; const keys = Object.keys( morphAttributes ); if ( keys.length > 0 ) { const morphAttribute = morphAttributes[ keys[ 0 ] ]; if ( morphAttribute !== undefined ) { this.morphTargetInfluences = []; this.morphTargetDictionary = {}; for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) { const name = morphAttribute[ m ].name || String( m ); this.morphTargetInfluences.push( 0 ); this.morphTargetDictionary[ name ] = m; } } } } } function checkIntersection( object, raycaster, ray, thresholdSq, a, b, i ) { const positionAttribute = object.geometry.attributes.position; _vStart.fromBufferAttribute( positionAttribute, a ); _vEnd.fromBufferAttribute( positionAttribute, b ); const distSq = ray.distanceSqToSegment( _vStart, _vEnd, _intersectPointOnRay, _intersectPointOnSegment ); if ( distSq > thresholdSq ) return; _intersectPointOnRay.applyMatrix4( object.matrixWorld ); // Move back to world space for distance calculation const distance = raycaster.ray.origin.distanceTo( _intersectPointOnRay ); if ( distance < raycaster.near || distance > raycaster.far ) return; return { distance: distance, // What do we want? intersection point on the ray or on the segment?? // point: raycaster.ray.at( distance ), point: _intersectPointOnSegment.clone().applyMatrix4( object.matrixWorld ), index: i, face: null, faceIndex: null, barycoord: null, object: object }; } const _start = /*@__PURE__*/ new Vector3(); const _end = /*@__PURE__*/ new Vector3(); /** * A series of lines drawn between pairs of vertices. * * @augments Line */ class LineSegments extends Line { /** * Constructs a new line segments. * * @param {BufferGeometry} [geometry] - The line geometry. * @param {Material|Array} [material] - The line material. */ constructor( geometry, material ) { super( geometry, material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineSegments = true; this.type = 'LineSegments'; } computeLineDistances() { const geometry = this.geometry; // we assume non-indexed geometry if ( geometry.index === null ) { const positionAttribute = geometry.attributes.position; const lineDistances = []; for ( let i = 0, l = positionAttribute.count; i < l; i += 2 ) { _start.fromBufferAttribute( positionAttribute, i ); _end.fromBufferAttribute( positionAttribute, i + 1 ); lineDistances[ i ] = ( i === 0 ) ? 0 : lineDistances[ i - 1 ]; lineDistances[ i + 1 ] = lineDistances[ i ] + _start.distanceTo( _end ); } geometry.setAttribute( 'lineDistance', new Float32BufferAttribute( lineDistances, 1 ) ); } else { console.warn( 'THREE.LineSegments.computeLineDistances(): Computation only possible with non-indexed BufferGeometry.' ); } return this; } } /** * A continuous line. This is nearly the same as {@link Line} the only difference * is that the last vertex is connected with the first vertex in order to close * the line to form a loop. * * @augments Line */ class LineLoop extends Line { /** * Constructs a new line loop. * * @param {BufferGeometry} [geometry] - The line geometry. * @param {Material|Array} [material] - The line material. */ constructor( geometry, material ) { super( geometry, material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineLoop = true; this.type = 'LineLoop'; } } /** * A material for rendering point primitives. * * Materials define the appearance of renderable 3D objects. 
* * ```js * const vertices = []; * * for ( let i = 0; i < 10000; i ++ ) { * const x = THREE.MathUtils.randFloatSpread( 2000 ); * const y = THREE.MathUtils.randFloatSpread( 2000 ); * const z = THREE.MathUtils.randFloatSpread( 2000 ); * * vertices.push( x, y, z ); * } * * const geometry = new THREE.BufferGeometry(); * geometry.setAttribute( 'position', new THREE.Float32BufferAttribute( vertices, 3 ) ); * const material = new THREE.PointsMaterial( { color: 0x888888 } ); * const points = new THREE.Points( geometry, material ); * scene.add( points ); * ``` * * @augments Material */ class PointsMaterial extends Material { /** * Constructs a new points material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isPointsMaterial = true; this.type = 'PointsMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * Defines the size of the points in pixels. * * Might be capped if the value exceeds hardware dependent parameters like [gl.ALIASED_POINT_SIZE_RANGE]{@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/getParamete}. * * @type {number} * @default 1 */ this.size = 1; /** * Specifies whether size of individual points is attenuated by the camera depth (perspective camera only). * * @type {boolean} * @default true */ this.sizeAttenuation = true; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.alphaMap = source.alphaMap; this.size = source.size; this.sizeAttenuation = source.sizeAttenuation; this.fog = source.fog; return this; } } const _inverseMatrix = /*@__PURE__*/ new Matrix4(); const _ray = /*@__PURE__*/ new Ray(); const _sphere = /*@__PURE__*/ new Sphere(); const _position$2 = /*@__PURE__*/ new Vector3(); /** * A class for displaying points or point clouds. * * @augments Object3D */ class Points extends Object3D { /** * Constructs a new point cloud. * * @param {BufferGeometry} [geometry] - The points geometry. * @param {Material|Array} [material] - The points material. */ constructor( geometry = new BufferGeometry(), material = new PointsMaterial() ) { super(); /** * This flag can be used for type testing. 
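*
* For example (a hedged sketch, assuming an existing `scene`):
* ```js
* scene.traverse( ( object ) => {
*
*     if ( object.isPoints === true ) object.material.size = 2;
*
* } );
* ```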
* * @type {boolean} * @readonly * @default true */ this.isPoints = true; this.type = 'Points'; /** * The points geometry. * * @type {BufferGeometry} */ this.geometry = geometry; /** * The points material. * * @type {Material|Array} * @default PointsMaterial */ this.material = material; /** * A dictionary representing the morph targets in the geometry. The key is the * morph target's name, the value its attribute index. This member is `undefined` * by default and only set when morph targets are detected in the geometry. * * @type {Object|undefined} * @default undefined */ this.morphTargetDictionary = undefined; /** * An array of weights typically in the range `[0,1]` that specify how much of the morph * is applied. This member is `undefined` by default and only set when morph targets are * detected in the geometry. * * @type {Array|undefined} * @default undefined */ this.morphTargetInfluences = undefined; this.updateMorphTargets(); } copy( source, recursive ) { super.copy( source, recursive ); this.material = Array.isArray( source.material ) ? source.material.slice() : source.material; this.geometry = source.geometry; return this; } /** * Computes intersection points between a casted ray and this point cloud. * * @param {Raycaster} raycaster - The raycaster. * @param {Array} intersects - The target array that holds the intersection points. */ raycast( raycaster, intersects ) { const geometry = this.geometry; const matrixWorld = this.matrixWorld; const threshold = raycaster.params.Points.threshold; const drawRange = geometry.drawRange; // Checking boundingSphere distance to ray if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere(); _sphere.copy( geometry.boundingSphere ); _sphere.applyMatrix4( matrixWorld ); _sphere.radius += threshold; if ( raycaster.ray.intersectsSphere( _sphere ) === false ) return; // _inverseMatrix.copy( matrixWorld ).invert(); _ray.copy( raycaster.ray ).applyMatrix4( _inverseMatrix ); const localThreshold = threshold / ( ( this.scale.x + this.scale.y + this.scale.z ) / 3 ); const localThresholdSq = localThreshold * localThreshold; const index = geometry.index; const attributes = geometry.attributes; const positionAttribute = attributes.position; if ( index !== null ) { const start = Math.max( 0, drawRange.start ); const end = Math.min( index.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, il = end; i < il; i ++ ) { const a = index.getX( i ); _position$2.fromBufferAttribute( positionAttribute, a ); testPoint( _position$2, a, localThresholdSq, matrixWorld, raycaster, intersects, this ); } } else { const start = Math.max( 0, drawRange.start ); const end = Math.min( positionAttribute.count, ( drawRange.start + drawRange.count ) ); for ( let i = start, l = end; i < l; i ++ ) { _position$2.fromBufferAttribute( positionAttribute, i ); testPoint( _position$2, i, localThresholdSq, matrixWorld, raycaster, intersects, this ); } } } /** * Sets the values of {@link Points#morphTargetDictionary} and {@link Points#morphTargetInfluences} * to make sure existing morph targets can influence this 3D object.
*/ updateMorphTargets() { const geometry = this.geometry; const morphAttributes = geometry.morphAttributes; const keys = Object.keys( morphAttributes ); if ( keys.length > 0 ) { const morphAttribute = morphAttributes[ keys[ 0 ] ]; if ( morphAttribute !== undefined ) { this.morphTargetInfluences = []; this.morphTargetDictionary = {}; for ( let m = 0, ml = morphAttribute.length; m < ml; m ++ ) { const name = morphAttribute[ m ].name || String( m ); this.morphTargetInfluences.push( 0 ); this.morphTargetDictionary[ name ] = m; } } } } } function testPoint( point, index, localThresholdSq, matrixWorld, raycaster, intersects, object ) { const rayPointDistanceSq = _ray.distanceSqToPoint( point ); if ( rayPointDistanceSq < localThresholdSq ) { const intersectPoint = new Vector3(); _ray.closestPointToPoint( point, intersectPoint ); intersectPoint.applyMatrix4( matrixWorld ); const distance = raycaster.ray.origin.distanceTo( intersectPoint ); if ( distance < raycaster.near || distance > raycaster.far ) return; intersects.push( { distance: distance, distanceToRay: Math.sqrt( rayPointDistanceSq ), point: intersectPoint, index: index, face: null, faceIndex: null, barycoord: null, object: object } ); } } /** * A texture for use with a video. * * ```js * // assuming you have created a HTML video element with id="video" * const video = document.getElementById( 'video' ); * const texture = new THREE.VideoTexture( video ); * ``` * * Note: After the initial use of a texture, its dimensions, format, and type * cannot be changed. Instead, call {@link Texture#dispose} on the texture and instantiate a new one. * * @augments Texture */ class VideoTexture extends Texture { /** * Constructs a new video texture. * * @param {Video} video - The video element to use as a data source for the texture. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearFilter] - The min filter value. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. */ constructor( video, mapping, wrapS, wrapT, magFilter = LinearFilter, minFilter = LinearFilter, format, type, anisotropy ) { super( video, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isVideoTexture = true; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; const scope = this; function updateVideo() { scope.needsUpdate = true; video.requestVideoFrameCallback( updateVideo ); } if ( 'requestVideoFrameCallback' in video ) { video.requestVideoFrameCallback( updateVideo ); } } clone() { return new this.constructor( this.image ).copy( this ); } /** * This method is called automatically by the renderer and sets {@link Texture#needsUpdate} * to `true` every time a new frame is available. * * Only relevant if `requestVideoFrameCallback` is not supported in the browser. 
*/ update() { const video = this.image; const hasVideoFrameCallback = 'requestVideoFrameCallback' in video; if ( hasVideoFrameCallback === false && video.readyState >= video.HAVE_CURRENT_DATA ) { this.needsUpdate = true; } } } /** * This class can be used as an alternative way to define video data. Instead of using * an instance of `HTMLVideoElement` like with `VideoTexture`, `VideoFrameTexture` expects each frame to be * defined manually via {@link VideoFrameTexture#setFrame}. A typical use case for this module is when * video frames are decoded with the WebCodecs API. * * ```js * const texture = new THREE.VideoFrameTexture(); * texture.setFrame( frame ); * ``` * * @augments VideoTexture */ class VideoFrameTexture extends VideoTexture { /** * Constructs a new video frame texture. * * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearFilter] - The min filter value. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. */ constructor( mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ) { super( {}, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isVideoFrameTexture = true; } /** * This method is overwritten with an empty implementation since * this type of texture is updated via `setFrame()`. */ update() {} clone() { return new this.constructor().copy( this ); // restoring Texture.clone() } /** * Sets the current frame of the video. This will automatically update the texture * so the data can be used for rendering. * * @param {VideoFrame} frame - The video frame. */ setFrame( frame ) { this.image = frame; this.needsUpdate = true; } } /** * This class can only be used in combination with `copyFramebufferToTexture()` methods * of renderers. It extracts the contents of the currently bound framebuffer and provides it * as a texture for further usage. * * ```js * const pixelRatio = window.devicePixelRatio; * const textureSize = 128 * pixelRatio; * * const frameTexture = new FramebufferTexture( textureSize, textureSize ); * * // calculate start position for copying part of the frame data * const vector = new Vector2(); * vector.x = ( window.innerWidth * pixelRatio / 2 ) - ( textureSize / 2 ); * vector.y = ( window.innerHeight * pixelRatio / 2 ) - ( textureSize / 2 ); * * renderer.render( scene, camera ); * * // copy part of the rendered frame into the framebuffer texture * renderer.copyFramebufferToTexture( frameTexture, vector ); * ``` * * @augments Texture */ class FramebufferTexture extends Texture { /** * Constructs a new framebuffer texture. * * @param {number} width - The width of the texture. * @param {number} height - The height of the texture. */ constructor( width, height ) { super( { width, height } ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isFramebufferTexture = true; /** * How the texture is sampled when a texel covers more than one pixel. * * Overwritten and set to `NearestFilter` by default to disable filtering.
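 *
 * A small sketch, assuming the copied region is later drawn at a different size and
 * smooth sampling is preferred over the raw pixel copy (the `256` dimensions are arbitrary):
 *
 * ```js
 * const frameTexture = new THREE.FramebufferTexture( 256, 256 );
 *
 * // opt back into bilinear filtering before the texture is first used
 * frameTexture.magFilter = THREE.LinearFilter;
 * frameTexture.minFilter = THREE.LinearFilter;
 * ```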
* * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.magFilter = NearestFilter; /** * How the texture is sampled when a texel covers less than one pixel. * * Overwritten and set to `NearestFilter` by default to disable filtering. * * @type {(NearestFilter|NearestMipmapNearestFilter|NearestMipmapLinearFilter|LinearFilter|LinearMipmapNearestFilter|LinearMipmapLinearFilter)} * @default NearestFilter */ this.minFilter = NearestFilter; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; this.needsUpdate = true; } } /** * Creates a texture based on data in compressed form. * * These texture are usually loaded with {@link CompressedTextureLoader}. * * @augments Texture */ class CompressedTexture extends Texture { /** * Constructs a new compressed texture. * * @param {Array} mipmaps - This array holds for all mipmaps (including the bases mip) * the data and dimensions. * @param {number} width - The width of the texture. * @param {number} height - The height of the texture. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. * @param {string} [colorSpace=NoColorSpace] - The color space. */ constructor( mipmaps, width, height, format, type, mapping, wrapS, wrapT, magFilter, minFilter, anisotropy, colorSpace ) { super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy, colorSpace ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCompressedTexture = true; /** * The image property of a compressed texture just defines its dimensions. * * @type {{width:number,height:number}} */ this.image = { width: width, height: height }; /** * This array holds for all mipmaps (including the bases mip) the data and dimensions. * * @type {Array} */ this.mipmaps = mipmaps; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default since it is not possible to * flip compressed textures. * * @type {boolean} * @default false * @readonly */ this.flipY = false; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default since it is not * possible to generate mipmaps for compressed data. Mipmaps * must be embedded in the compressed texture file. * * @type {boolean} * @default false * @readonly */ this.generateMipmaps = false; } } /** * Creates a texture 2D array based on data in compressed form. * * These texture are usually loaded with {@link CompressedTextureLoader}. * * @augments CompressedTexture */ class CompressedArrayTexture extends CompressedTexture { /** * Constructs a new compressed array texture. * * @param {Array} mipmaps - This array holds for all mipmaps (including the bases mip) * the data and dimensions. * @param {number} width - The width of the texture. 
* @param {number} height - The height of the texture. * @param {number} depth - The depth of the texture. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. */ constructor( mipmaps, width, height, depth, format, type ) { super( mipmaps, width, height, format, type ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCompressedArrayTexture = true; /** * The image property of a compressed texture just defines its dimensions. * * @name CompressedArrayTexture#image * @type {{width:number,height:number,depth:number}} */ this.image.depth = depth; /** * This defines how the texture is wrapped in the depth direction and corresponds to * *W* in UVW mapping. * * @type {(RepeatWrapping|ClampToEdgeWrapping|MirroredRepeatWrapping)} * @default ClampToEdgeWrapping */ this.wrapR = ClampToEdgeWrapping; /** * A set of all layers which need to be updated in the texture. * * @type {Set} */ this.layerUpdates = new Set(); } /** * Describes that a specific layer of the texture needs to be updated. * Normally when {@link Texture#needsUpdate} is set to `true`, the * entire compressed texture array is sent to the GPU. Marking specific * layers will only transmit subsets of all mipmaps associated with a * specific depth in the array which is often much more performant. * * @param {number} layerIndex - The layer index that should be updated. */ addLayerUpdate( layerIndex ) { this.layerUpdates.add( layerIndex ); } /** * Resets the layer updates registry. */ clearLayerUpdates() { this.layerUpdates.clear(); } } /** * Creates a cube texture based on data in compressed form. * * These textures are usually loaded with {@link CompressedTextureLoader}. * * @augments CompressedTexture */ class CompressedCubeTexture extends CompressedTexture { /** * Constructs a new compressed cube texture. * * @param {Array} images - An array of compressed textures. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. */ constructor( images, format, type ) { super( undefined, images[ 0 ].width, images[ 0 ].height, format, type, CubeReflectionMapping ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCompressedCubeTexture = true; /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCubeTexture = true; this.image = images; } } /** * Creates a texture from a canvas element. * * This is almost the same as the base texture class, except that it sets {@link Texture#needsUpdate} * to `true` immediately since a canvas can directly be used for rendering. * * @augments Texture */ class CanvasTexture extends Texture { /** * Constructs a new canvas texture. * * @param {HTMLCanvasElement} [canvas] - The HTML canvas element. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearMipmapLinearFilter] - The min filter value. * @param {number} [format=RGBAFormat] - The texture format. * @param {number} [type=UnsignedByteType] - The texture type. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value.
*/ constructor( canvas, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ) { super( canvas, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCanvasTexture = true; this.needsUpdate = true; } } /** * This class can be used to automatically save the depth information of a * rendering into a texture. * * @augments Texture */ class DepthTexture extends Texture { /** * Constructs a new depth texture. * * @param {number} width - The width of the texture. * @param {number} height - The height of the texture. * @param {number} [type=UnsignedIntType] - The texture type. * @param {number} [mapping=Texture.DEFAULT_MAPPING] - The texture mapping. * @param {number} [wrapS=ClampToEdgeWrapping] - The wrapS value. * @param {number} [wrapT=ClampToEdgeWrapping] - The wrapT value. * @param {number} [magFilter=LinearFilter] - The mag filter value. * @param {number} [minFilter=LinearFilter] - The min filter value. * @param {number} [anisotropy=Texture.DEFAULT_ANISOTROPY] - The anisotropy value. * @param {number} [format=DepthFormat] - The texture format. */ constructor( width, height, type = UnsignedIntType, mapping, wrapS, wrapT, magFilter = NearestFilter, minFilter = NearestFilter, anisotropy, format = DepthFormat ) { if ( format !== DepthFormat && format !== DepthStencilFormat ) { throw new Error( 'DepthTexture format must be either THREE.DepthFormat or THREE.DepthStencilFormat' ); } super( null, mapping, wrapS, wrapT, magFilter, minFilter, format, type, anisotropy ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isDepthTexture = true; /** * The image property of a depth texture just defines its dimensions. * * @type {{width:number,height:number}} */ this.image = { width: width, height: height }; /** * If set to `true`, the texture is flipped along the vertical axis when * uploaded to the GPU. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.flipY = false; /** * Whether to generate mipmaps (if possible) for a texture. * * Overwritten and set to `false` by default. * * @type {boolean} * @default false */ this.generateMipmaps = false; /** * The depth compare function. * * @type {?(NeverCompare|LessCompare|EqualCompare|LessEqualCompare|GreaterCompare|NotEqualCompare|GreaterEqualCompare|AlwaysCompare)} * @default null */ this.compareFunction = null; } copy( source ) { super.copy( source ); this.source = new Source( Object.assign( {}, source.image ) ); // see #30540 this.compareFunction = source.compareFunction; return this; } toJSON( meta ) { const data = super.toJSON( meta ); if ( this.compareFunction !== null ) data.compareFunction = this.compareFunction; return data; } } /** * An abstract base class for creating an analytic curve object that contains methods * for interpolation. * * @abstract */ class Curve { /** * Constructs a new curve. */ constructor() { /** * The type property is used for detecting the object type * in context of serialization/deserialization. * * @type {string} * @readonly */ this.type = 'Curve'; /** * This value determines the amount of divisions when calculating the * cumulative segment lengths of a curve via {@link Curve#getLengths}. To ensure * precision when using methods like {@link Curve#getSpacedPoints}, it is * recommended to increase the value of this property if the curve is very large. 
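 *
 * A minimal sketch of tuning this value (the numbers are arbitrary; `curve` is any
 * `Curve` subclass instance):
 *
 * ```js
 * curve.arcLengthDivisions = 1000; // finer arc-length sampling for a large curve
 * curve.updateArcLengths(); // rebuild the cached lengths at the new resolution
 *
 * const points = curve.getSpacedPoints( 200 );
 * ```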
* * @type {number} * @default 200 */ this.arcLengthDivisions = 200; /** * Must be set to `true` if the curve parameters have changed. * * @type {boolean} * @default false */ this.needsUpdate = false; /** * An internal cache that holds precomputed curve length values. * * @private * @type {?Array} * @default null */ this.cacheArcLengths = null; } /** * This method returns a vector in 2D or 3D space (depending on the curve definition) * for the given interpolation factor. * * @abstract * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {(Vector2|Vector3)} [optionalTarget] - The optional target vector the result is written to. * @return {(Vector2|Vector3)} The position on the curve. It can be a 2D or 3D vector depending on the curve definition. */ getPoint( /* t, optionalTarget */ ) { console.warn( 'THREE.Curve: .getPoint() not implemented.' ); } /** * This method returns a vector in 2D or 3D space (depending on the curve definition) * for the given interpolation factor. Unlike {@link Curve#getPoint}, this method honors the length * of the curve which equidistant samples. * * @param {number} u - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {(Vector2|Vector3)} [optionalTarget] - The optional target vector the result is written to. * @return {(Vector2|Vector3)} The position on the curve. It can be a 2D or 3D vector depending on the curve definition. */ getPointAt( u, optionalTarget ) { const t = this.getUtoTmapping( u ); return this.getPoint( t, optionalTarget ); } /** * This method samples the curve via {@link Curve#getPoint} and returns an array of points representing * the curve shape. * * @param {number} [divisions=5] - The number of divisions. * @return {Array<(Vector2|Vector3)>} An array holding the sampled curve values. The number of points is `divisions + 1`. */ getPoints( divisions = 5 ) { const points = []; for ( let d = 0; d <= divisions; d ++ ) { points.push( this.getPoint( d / divisions ) ); } return points; } // Get sequence of points using getPointAt( u ) /** * This method samples the curve via {@link Curve#getPointAt} and returns an array of points representing * the curve shape. Unlike {@link Curve#getPoints}, this method returns equi-spaced points across the entire * curve. * * @param {number} [divisions=5] - The number of divisions. * @return {Array<(Vector2|Vector3)>} An array holding the sampled curve values. The number of points is `divisions + 1`. */ getSpacedPoints( divisions = 5 ) { const points = []; for ( let d = 0; d <= divisions; d ++ ) { points.push( this.getPointAt( d / divisions ) ); } return points; } /** * Returns the total arc length of the curve. * * @return {number} The length of the curve. */ getLength() { const lengths = this.getLengths(); return lengths[ lengths.length - 1 ]; } /** * Returns an array of cumulative segment lengths of the curve. * * @param {number} [divisions=this.arcLengthDivisions] - The number of divisions. * @return {Array} An array holding the cumulative segment lengths. */ getLengths( divisions = this.arcLengthDivisions ) { if ( this.cacheArcLengths && ( this.cacheArcLengths.length === divisions + 1 ) && ! 
this.needsUpdate ) { return this.cacheArcLengths; } this.needsUpdate = false; const cache = []; let current, last = this.getPoint( 0 ); let sum = 0; cache.push( 0 ); for ( let p = 1; p <= divisions; p ++ ) { current = this.getPoint( p / divisions ); sum += current.distanceTo( last ); cache.push( sum ); last = current; } this.cacheArcLengths = cache; return cache; // { sums: cache, sum: sum }; Sum is in the last element. } /** * Update the cumulative segment distance cache. The method must be called * every time curve parameters are changed. If an updated curve is part of a * composed curve like {@link CurvePath}, this method must be called on the * composed curve, too. */ updateArcLengths() { this.needsUpdate = true; this.getLengths(); } /** * Given an interpolation factor in the range `[0,1]`, this method returns an updated * interpolation factor in the same range that can be ued to sample equidistant points * from a curve. * * @param {number} u - The interpolation factor. * @param {?number} distance - An optional distance on the curve. * @return {number} The updated interpolation factor. */ getUtoTmapping( u, distance = null ) { const arcLengths = this.getLengths(); let i = 0; const il = arcLengths.length; let targetArcLength; // The targeted u distance value to get if ( distance ) { targetArcLength = distance; } else { targetArcLength = u * arcLengths[ il - 1 ]; } // binary search for the index with largest value smaller than target u distance let low = 0, high = il - 1, comparison; while ( low <= high ) { i = Math.floor( low + ( high - low ) / 2 ); // less likely to overflow, though probably not issue here, JS doesn't really have integers, all numbers are floats comparison = arcLengths[ i ] - targetArcLength; if ( comparison < 0 ) { low = i + 1; } else if ( comparison > 0 ) { high = i - 1; } else { high = i; break; // DONE } } i = high; if ( arcLengths[ i ] === targetArcLength ) { return i / ( il - 1 ); } // we could get finer grain at lengths, or use simple interpolation between two points const lengthBefore = arcLengths[ i ]; const lengthAfter = arcLengths[ i + 1 ]; const segmentLength = lengthAfter - lengthBefore; // determine where we are between the 'before' and 'after' points const segmentFraction = ( targetArcLength - lengthBefore ) / segmentLength; // add that fractional amount to t const t = ( i + segmentFraction ) / ( il - 1 ); return t; } /** * Returns a unit vector tangent for the given interpolation factor. * If the derived curve does not implement its tangent derivation, * two points a small delta apart will be used to find its gradient * which seems to give a reasonable approximation. * * @param {number} t - The interpolation factor. * @param {(Vector2|Vector3)} [optionalTarget] - The optional target vector the result is written to. * @return {(Vector2|Vector3)} The tangent vector. */ getTangent( t, optionalTarget ) { const delta = 0.0001; let t1 = t - delta; let t2 = t + delta; // Capping in case of danger if ( t1 < 0 ) t1 = 0; if ( t2 > 1 ) t2 = 1; const pt1 = this.getPoint( t1 ); const pt2 = this.getPoint( t2 ); const tangent = optionalTarget || ( ( pt1.isVector2 ) ? new Vector2() : new Vector3() ); tangent.copy( pt2 ).sub( pt1 ).normalize(); return tangent; } /** * Same as {@link Curve#getTangent} but with equidistant samples. * * @param {number} u - The interpolation factor. * @param {(Vector2|Vector3)} [optionalTarget] - The optional target vector the result is written to. * @return {(Vector2|Vector3)} The tangent vector. 
* @see {@link Curve#getPointAt} */ getTangentAt( u, optionalTarget ) { const t = this.getUtoTmapping( u ); return this.getTangent( t, optionalTarget ); } /** * Generates the Frenet Frames. Requires a curve definition in 3D space. Used * in geometries like {@link TubeGeometry} or {@link ExtrudeGeometry}. * * @param {number} segments - The number of segments. * @param {boolean} [closed=false] - Whether the curve is closed or not. * @return {{tangents: Array, normals: Array, binormals: Array}} The Frenet Frames. */ computeFrenetFrames( segments, closed = false ) { // see http://www.cs.indiana.edu/pub/techreports/TR425.pdf const normal = new Vector3(); const tangents = []; const normals = []; const binormals = []; const vec = new Vector3(); const mat = new Matrix4(); // compute the tangent vectors for each segment on the curve for ( let i = 0; i <= segments; i ++ ) { const u = i / segments; tangents[ i ] = this.getTangentAt( u, new Vector3() ); } // select an initial normal vector perpendicular to the first tangent vector, // and in the direction of the minimum tangent xyz component normals[ 0 ] = new Vector3(); binormals[ 0 ] = new Vector3(); let min = Number.MAX_VALUE; const tx = Math.abs( tangents[ 0 ].x ); const ty = Math.abs( tangents[ 0 ].y ); const tz = Math.abs( tangents[ 0 ].z ); if ( tx <= min ) { min = tx; normal.set( 1, 0, 0 ); } if ( ty <= min ) { min = ty; normal.set( 0, 1, 0 ); } if ( tz <= min ) { normal.set( 0, 0, 1 ); } vec.crossVectors( tangents[ 0 ], normal ).normalize(); normals[ 0 ].crossVectors( tangents[ 0 ], vec ); binormals[ 0 ].crossVectors( tangents[ 0 ], normals[ 0 ] ); // compute the slowly-varying normal and binormal vectors for each segment on the curve for ( let i = 1; i <= segments; i ++ ) { normals[ i ] = normals[ i - 1 ].clone(); binormals[ i ] = binormals[ i - 1 ].clone(); vec.crossVectors( tangents[ i - 1 ], tangents[ i ] ); if ( vec.length() > Number.EPSILON ) { vec.normalize(); const theta = Math.acos( clamp( tangents[ i - 1 ].dot( tangents[ i ] ), -1, 1 ) ); // clamp for floating pt errors normals[ i ].applyMatrix4( mat.makeRotationAxis( vec, theta ) ); } binormals[ i ].crossVectors( tangents[ i ], normals[ i ] ); } // if the curve is closed, postprocess the vectors so the first and last normal vectors are the same if ( closed === true ) { let theta = Math.acos( clamp( normals[ 0 ].dot( normals[ segments ] ), -1, 1 ) ); theta /= segments; if ( tangents[ 0 ].dot( vec.crossVectors( normals[ 0 ], normals[ segments ] ) ) > 0 ) { theta = - theta; } for ( let i = 1; i <= segments; i ++ ) { // twist a little... normals[ i ].applyMatrix4( mat.makeRotationAxis( tangents[ i ], theta * i ) ); binormals[ i ].crossVectors( tangents[ i ], normals[ i ] ); } } return { tangents: tangents, normals: normals, binormals: binormals }; } /** * Returns a new curve with copied values from this instance. * * @return {Curve} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given curve to this instance. * * @param {Curve} source - The curve to copy. * @return {Curve} A reference to this curve. */ copy( source ) { this.arcLengthDivisions = source.arcLengthDivisions; return this; } /** * Serializes the curve into JSON. * * @return {Object} A JSON object representing the serialized curve. 
* @see {@link ObjectLoader#parse} */ toJSON() { const data = { metadata: { version: 4.6, type: 'Curve', generator: 'Curve.toJSON' } }; data.arcLengthDivisions = this.arcLengthDivisions; data.type = this.type; return data; } /** * Deserializes the curve from the given JSON. * * @param {Object} json - The JSON holding the serialized curve. * @return {Curve} A reference to this curve. */ fromJSON( json ) { this.arcLengthDivisions = json.arcLengthDivisions; return this; } } /** * A curve representing an ellipse. * * ```js * const curve = new THREE.EllipseCurve( * 0, 0, * 10, 10, * 0, 2 * Math.PI, * false, * 0 * ); * * const points = curve.getPoints( 50 ); * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const material = new THREE.LineBasicMaterial( { color: 0xff0000 } ); * * // Create the final object to add to the scene * const ellipse = new THREE.Line( geometry, material ); * ``` * * @augments Curve */ class EllipseCurve extends Curve { /** * Constructs a new ellipse curve. * * @param {number} [aX=0] - The X center of the ellipse. * @param {number} [aY=0] - The Y center of the ellipse. * @param {number} [xRadius=1] - The radius of the ellipse in the x direction. * @param {number} [yRadius=1] - The radius of the ellipse in the y direction. * @param {number} [aStartAngle=0] - The start angle of the curve in radians starting from the positive X axis. * @param {number} [aEndAngle=Math.PI*2] - The end angle of the curve in radians starting from the positive X axis. * @param {boolean} [aClockwise=false] - Whether the ellipse is drawn clockwise or not. * @param {number} [aRotation=0] - The rotation angle of the ellipse in radians, counterclockwise from the positive X axis. */ constructor( aX = 0, aY = 0, xRadius = 1, yRadius = 1, aStartAngle = 0, aEndAngle = Math.PI * 2, aClockwise = false, aRotation = 0 ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isEllipseCurve = true; this.type = 'EllipseCurve'; /** * The X center of the ellipse. * * @type {number} * @default 0 */ this.aX = aX; /** * The Y center of the ellipse. * * @type {number} * @default 0 */ this.aY = aY; /** * The radius of the ellipse in the x direction. * Setting the this value equal to the {@link EllipseCurve#yRadius} will result in a circle. * * @type {number} * @default 1 */ this.xRadius = xRadius; /** * The radius of the ellipse in the y direction. * Setting the this value equal to the {@link EllipseCurve#xRadius} will result in a circle. * * @type {number} * @default 1 */ this.yRadius = yRadius; /** * The start angle of the curve in radians starting from the positive X axis. * * @type {number} * @default 0 */ this.aStartAngle = aStartAngle; /** * The end angle of the curve in radians starting from the positive X axis. * * @type {number} * @default Math.PI*2 */ this.aEndAngle = aEndAngle; /** * Whether the ellipse is drawn clockwise or not. * * @type {boolean} * @default false */ this.aClockwise = aClockwise; /** * The rotation angle of the ellipse in radians, counterclockwise from the positive X axis. * * @type {number} * @default 0 */ this.aRotation = aRotation; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector2} [optionalTarget] - The optional target vector the result is written to. * @return {Vector2} The position on the curve. 
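 *
 * A short sketch of sampling with a reusable target vector to avoid per-call
 * allocations (`curve` is assumed to be an `EllipseCurve` created elsewhere):
 *
 * ```js
 * const target = new THREE.Vector2();
 * curve.getPoint( 0.5, target ); // point halfway along the arc, written into `target`
 * ```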
*/ getPoint( t, optionalTarget = new Vector2() ) { const point = optionalTarget; const twoPi = Math.PI * 2; let deltaAngle = this.aEndAngle - this.aStartAngle; const samePoints = Math.abs( deltaAngle ) < Number.EPSILON; // ensures that deltaAngle is 0 .. 2 PI while ( deltaAngle < 0 ) deltaAngle += twoPi; while ( deltaAngle > twoPi ) deltaAngle -= twoPi; if ( deltaAngle < Number.EPSILON ) { if ( samePoints ) { deltaAngle = 0; } else { deltaAngle = twoPi; } } if ( this.aClockwise === true && ! samePoints ) { if ( deltaAngle === twoPi ) { deltaAngle = - twoPi; } else { deltaAngle = deltaAngle - twoPi; } } const angle = this.aStartAngle + t * deltaAngle; let x = this.aX + this.xRadius * Math.cos( angle ); let y = this.aY + this.yRadius * Math.sin( angle ); if ( this.aRotation !== 0 ) { const cos = Math.cos( this.aRotation ); const sin = Math.sin( this.aRotation ); const tx = x - this.aX; const ty = y - this.aY; // Rotate the point about the center of the ellipse. x = tx * cos - ty * sin + this.aX; y = tx * sin + ty * cos + this.aY; } return point.set( x, y ); } copy( source ) { super.copy( source ); this.aX = source.aX; this.aY = source.aY; this.xRadius = source.xRadius; this.yRadius = source.yRadius; this.aStartAngle = source.aStartAngle; this.aEndAngle = source.aEndAngle; this.aClockwise = source.aClockwise; this.aRotation = source.aRotation; return this; } toJSON() { const data = super.toJSON(); data.aX = this.aX; data.aY = this.aY; data.xRadius = this.xRadius; data.yRadius = this.yRadius; data.aStartAngle = this.aStartAngle; data.aEndAngle = this.aEndAngle; data.aClockwise = this.aClockwise; data.aRotation = this.aRotation; return data; } fromJSON( json ) { super.fromJSON( json ); this.aX = json.aX; this.aY = json.aY; this.xRadius = json.xRadius; this.yRadius = json.yRadius; this.aStartAngle = json.aStartAngle; this.aEndAngle = json.aEndAngle; this.aClockwise = json.aClockwise; this.aRotation = json.aRotation; return this; } } /** * A curve representing an arc. * * @augments EllipseCurve */ class ArcCurve extends EllipseCurve { /** * Constructs a new arc curve. * * @param {number} [aX=0] - The X center of the ellipse. * @param {number} [aY=0] - The Y center of the ellipse. * @param {number} [aRadius=1] - The radius of the ellipse in the x direction. * @param {number} [aStartAngle=0] - The start angle of the curve in radians starting from the positive X axis. * @param {number} [aEndAngle=Math.PI*2] - The end angle of the curve in radians starting from the positive X axis. * @param {boolean} [aClockwise=false] - Whether the ellipse is drawn clockwise or not. */ constructor( aX, aY, aRadius, aStartAngle, aEndAngle, aClockwise ) { super( aX, aY, aRadius, aRadius, aStartAngle, aEndAngle, aClockwise ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isArcCurve = true; this.type = 'ArcCurve'; } } function CubicPoly() { /** * Centripetal CatmullRom Curve - which is useful for avoiding * cusps and self-intersections in non-uniform catmull rom curves. 
* http://www.cemyuksel.com/research/catmullrom_param/catmullrom.pdf * * curve.type accepts centripetal(default), chordal and catmullrom * curve.tension is used for catmullrom which defaults to 0.5 */ /* Based on an optimized c++ solution in - http://stackoverflow.com/questions/9489736/catmull-rom-curve-with-no-cusps-and-no-self-intersections/ - http://ideone.com/NoEbVM This CubicPoly class could be used for reusing some variables and calculations, but for three.js curve use, it could be possible inlined and flatten into a single function call which can be placed in CurveUtils. */ let c0 = 0, c1 = 0, c2 = 0, c3 = 0; /* * Compute coefficients for a cubic polynomial * p(s) = c0 + c1*s + c2*s^2 + c3*s^3 * such that * p(0) = x0, p(1) = x1 * and * p'(0) = t0, p'(1) = t1. */ function init( x0, x1, t0, t1 ) { c0 = x0; c1 = t0; c2 = -3 * x0 + 3 * x1 - 2 * t0 - t1; c3 = 2 * x0 - 2 * x1 + t0 + t1; } return { initCatmullRom: function ( x0, x1, x2, x3, tension ) { init( x1, x2, tension * ( x2 - x0 ), tension * ( x3 - x1 ) ); }, initNonuniformCatmullRom: function ( x0, x1, x2, x3, dt0, dt1, dt2 ) { // compute tangents when parameterized in [t1,t2] let t1 = ( x1 - x0 ) / dt0 - ( x2 - x0 ) / ( dt0 + dt1 ) + ( x2 - x1 ) / dt1; let t2 = ( x2 - x1 ) / dt1 - ( x3 - x1 ) / ( dt1 + dt2 ) + ( x3 - x2 ) / dt2; // rescale tangents for parametrization in [0,1] t1 *= dt1; t2 *= dt1; init( x1, x2, t1, t2 ); }, calc: function ( t ) { const t2 = t * t; const t3 = t2 * t; return c0 + c1 * t + c2 * t2 + c3 * t3; } }; } // const tmp = /*@__PURE__*/ new Vector3(); const px = /*@__PURE__*/ new CubicPoly(); const py = /*@__PURE__*/ new CubicPoly(); const pz = /*@__PURE__*/ new CubicPoly(); /** * A curve representing a Catmull-Rom spline. * * ```js * //Create a closed wavey loop * const curve = new THREE.CatmullRomCurve3( [ * new THREE.Vector3( -10, 0, 10 ), * new THREE.Vector3( -5, 5, 5 ), * new THREE.Vector3( 0, 0, 0 ), * new THREE.Vector3( 5, -5, 5 ), * new THREE.Vector3( 10, 0, 10 ) * ] ); * * const points = curve.getPoints( 50 ); * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const material = new THREE.LineBasicMaterial( { color: 0xff0000 } ); * * // Create the final object to add to the scene * const curveObject = new THREE.Line( geometry, material ); * ``` * * @augments Curve */ class CatmullRomCurve3 extends Curve { /** * Constructs a new Catmull-Rom curve. * * @param {Array} [points] - An array of 3D points defining the curve. * @param {boolean} [closed=false] - Whether the curve is closed or not. * @param {('centripetal'|'chordal'|'catmullrom')} [curveType='centripetal'] - The curve type. * @param {number} [tension=0.5] - Tension of the curve. */ constructor( points = [], closed = false, curveType = 'centripetal', tension = 0.5 ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCatmullRomCurve3 = true; this.type = 'CatmullRomCurve3'; /** * An array of 3D points defining the curve. * * @type {Array} */ this.points = points; /** * Whether the curve is closed or not. * * @type {boolean} * @default false */ this.closed = closed; /** * The curve type. * * @type {('centripetal'|'chordal'|'catmullrom')} * @default 'centripetal' */ this.curveType = curveType; /** * Tension of the curve. * * @type {number} * @default 0.5 */ this.tension = tension; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. 
* @param {Vector3} [optionalTarget] - The optional target vector the result is written to. * @return {Vector3} The position on the curve. */ getPoint( t, optionalTarget = new Vector3() ) { const point = optionalTarget; const points = this.points; const l = points.length; const p = ( l - ( this.closed ? 0 : 1 ) ) * t; let intPoint = Math.floor( p ); let weight = p - intPoint; if ( this.closed ) { intPoint += intPoint > 0 ? 0 : ( Math.floor( Math.abs( intPoint ) / l ) + 1 ) * l; } else if ( weight === 0 && intPoint === l - 1 ) { intPoint = l - 2; weight = 1; } let p0, p3; // 4 points (p1 & p2 defined below) if ( this.closed || intPoint > 0 ) { p0 = points[ ( intPoint - 1 ) % l ]; } else { // extrapolate first point tmp.subVectors( points[ 0 ], points[ 1 ] ).add( points[ 0 ] ); p0 = tmp; } const p1 = points[ intPoint % l ]; const p2 = points[ ( intPoint + 1 ) % l ]; if ( this.closed || intPoint + 2 < l ) { p3 = points[ ( intPoint + 2 ) % l ]; } else { // extrapolate last point tmp.subVectors( points[ l - 1 ], points[ l - 2 ] ).add( points[ l - 1 ] ); p3 = tmp; } if ( this.curveType === 'centripetal' || this.curveType === 'chordal' ) { // init Centripetal / Chordal Catmull-Rom const pow = this.curveType === 'chordal' ? 0.5 : 0.25; let dt0 = Math.pow( p0.distanceToSquared( p1 ), pow ); let dt1 = Math.pow( p1.distanceToSquared( p2 ), pow ); let dt2 = Math.pow( p2.distanceToSquared( p3 ), pow ); // safety check for repeated points if ( dt1 < 1e-4 ) dt1 = 1.0; if ( dt0 < 1e-4 ) dt0 = dt1; if ( dt2 < 1e-4 ) dt2 = dt1; px.initNonuniformCatmullRom( p0.x, p1.x, p2.x, p3.x, dt0, dt1, dt2 ); py.initNonuniformCatmullRom( p0.y, p1.y, p2.y, p3.y, dt0, dt1, dt2 ); pz.initNonuniformCatmullRom( p0.z, p1.z, p2.z, p3.z, dt0, dt1, dt2 ); } else if ( this.curveType === 'catmullrom' ) { px.initCatmullRom( p0.x, p1.x, p2.x, p3.x, this.tension ); py.initCatmullRom( p0.y, p1.y, p2.y, p3.y, this.tension ); pz.initCatmullRom( p0.z, p1.z, p2.z, p3.z, this.tension ); } point.set( px.calc( weight ), py.calc( weight ), pz.calc( weight ) ); return point; } copy( source ) { super.copy( source ); this.points = []; for ( let i = 0, l = source.points.length; i < l; i ++ ) { const point = source.points[ i ]; this.points.push( point.clone() ); } this.closed = source.closed; this.curveType = source.curveType; this.tension = source.tension; return this; } toJSON() { const data = super.toJSON(); data.points = []; for ( let i = 0, l = this.points.length; i < l; i ++ ) { const point = this.points[ i ]; data.points.push( point.toArray() ); } data.closed = this.closed; data.curveType = this.curveType; data.tension = this.tension; return data; } fromJSON( json ) { super.fromJSON( json ); this.points = []; for ( let i = 0, l = json.points.length; i < l; i ++ ) { const point = json.points[ i ]; this.points.push( new Vector3().fromArray( point ) ); } this.closed = json.closed; this.curveType = json.curveType; this.tension = json.tension; return this; } } // Bezier Curves formulas obtained from: https://en.wikipedia.org/wiki/B%C3%A9zier_curve /** * Computes a point on a Catmull-Rom spline. * * @param {number} t - The interpolation factor. * @param {number} p0 - The first control point. * @param {number} p1 - The second control point. * @param {number} p2 - The third control point. * @param {number} p3 - The fourth control point. * @return {number} The calculated point on a Catmull-Rom spline. 
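 *
 * As a quick check on the formula below: with `v0 = ( p2 - p0 ) * 0.5` and
 * `v1 = ( p3 - p1 ) * 0.5`, evaluating at `t = 0` leaves only `p1`, and at `t = 1`
 * the terms sum to `p2`, so the spline interpolates its two middle control points.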
*/ function CatmullRom( t, p0, p1, p2, p3 ) { const v0 = ( p2 - p0 ) * 0.5; const v1 = ( p3 - p1 ) * 0.5; const t2 = t * t; const t3 = t * t2; return ( 2 * p1 - 2 * p2 + v0 + v1 ) * t3 + ( -3 * p1 + 3 * p2 - 2 * v0 - v1 ) * t2 + v0 * t + p1; } // function QuadraticBezierP0( t, p ) { const k = 1 - t; return k * k * p; } function QuadraticBezierP1( t, p ) { return 2 * ( 1 - t ) * t * p; } function QuadraticBezierP2( t, p ) { return t * t * p; } /** * Computes a point on a Quadratic Bezier curve. * * @param {number} t - The interpolation factor. * @param {number} p0 - The first control point. * @param {number} p1 - The second control point. * @param {number} p2 - The third control point. * @return {number} The calculated point on a Quadratic Bezier curve. */ function QuadraticBezier( t, p0, p1, p2 ) { return QuadraticBezierP0( t, p0 ) + QuadraticBezierP1( t, p1 ) + QuadraticBezierP2( t, p2 ); } // function CubicBezierP0( t, p ) { const k = 1 - t; return k * k * k * p; } function CubicBezierP1( t, p ) { const k = 1 - t; return 3 * k * k * t * p; } function CubicBezierP2( t, p ) { return 3 * ( 1 - t ) * t * t * p; } function CubicBezierP3( t, p ) { return t * t * t * p; } /** * Computes a point on a Cubic Bezier curve. * * @param {number} t - The interpolation factor. * @param {number} p0 - The first control point. * @param {number} p1 - The second control point. * @param {number} p2 - The third control point. * @param {number} p3 - The fourth control point. * @return {number} The calculated point on a Cubic Bezier curve. */ function CubicBezier( t, p0, p1, p2, p3 ) { return CubicBezierP0( t, p0 ) + CubicBezierP1( t, p1 ) + CubicBezierP2( t, p2 ) + CubicBezierP3( t, p3 ); } /** * A curve representing a 2D Cubic Bezier curve. * * ```js * const curve = new THREE.CubicBezierCurve( * new THREE.Vector2( - 0, 0 ), * new THREE.Vector2( - 5, 15 ), * new THREE.Vector2( 20, 15 ), * new THREE.Vector2( 10, 0 ) * ); * * const points = curve.getPoints( 50 ); * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const material = new THREE.LineBasicMaterial( { color: 0xff0000 } ); * * // Create the final object to add to the scene * const curveObject = new THREE.Line( geometry, material ); * ``` * * @augments Curve */ class CubicBezierCurve extends Curve { /** * Constructs a new Cubic Bezier curve. * * @param {Vector2} [v0] - The start point. * @param {Vector2} [v1] - The first control point. * @param {Vector2} [v2] - The second control point. * @param {Vector2} [v3] - The end point. */ constructor( v0 = new Vector2(), v1 = new Vector2(), v2 = new Vector2(), v3 = new Vector2() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCubicBezierCurve = true; this.type = 'CubicBezierCurve'; /** * The start point. * * @type {Vector2} */ this.v0 = v0; /** * The first control point. * * @type {Vector2} */ this.v1 = v1; /** * The second control point. * * @type {Vector2} */ this.v2 = v2; /** * The end point. * * @type {Vector2} */ this.v3 = v3; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector2} [optionalTarget] - The optional target vector the result is written to. * @return {Vector2} The position on the curve. 
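 *
 * A hedged sketch of driving an object along the curve (`mesh` and the parameter `t`
 * in `[0,1]` are assumed to come from the surrounding animation code; the z value is
 * chosen freely since this is a 2D curve):
 *
 * ```js
 * const position = curve.getPoint( t );
 * mesh.position.set( position.x, position.y, 0 );
 * ```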
*/ getPoint( t, optionalTarget = new Vector2() ) { const point = optionalTarget; const v0 = this.v0, v1 = this.v1, v2 = this.v2, v3 = this.v3; point.set( CubicBezier( t, v0.x, v1.x, v2.x, v3.x ), CubicBezier( t, v0.y, v1.y, v2.y, v3.y ) ); return point; } copy( source ) { super.copy( source ); this.v0.copy( source.v0 ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); this.v3.copy( source.v3 ); return this; } toJSON() { const data = super.toJSON(); data.v0 = this.v0.toArray(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); data.v3 = this.v3.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v0.fromArray( json.v0 ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); this.v3.fromArray( json.v3 ); return this; } } /** * A curve representing a 3D Cubic Bezier curve. * * @augments Curve */ class CubicBezierCurve3 extends Curve { /** * Constructs a new Cubic Bezier curve. * * @param {Vector3} [v0] - The start point. * @param {Vector3} [v1] - The first control point. * @param {Vector3} [v2] - The second control point. * @param {Vector3} [v3] - The end point. */ constructor( v0 = new Vector3(), v1 = new Vector3(), v2 = new Vector3(), v3 = new Vector3() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isCubicBezierCurve3 = true; this.type = 'CubicBezierCurve3'; /** * The start point. * * @type {Vector3} */ this.v0 = v0; /** * The first control point. * * @type {Vector3} */ this.v1 = v1; /** * The second control point. * * @type {Vector3} */ this.v2 = v2; /** * The end point. * * @type {Vector3} */ this.v3 = v3; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector3} [optionalTarget] - The optional target vector the result is written to. * @return {Vector3} The position on the curve. */ getPoint( t, optionalTarget = new Vector3() ) { const point = optionalTarget; const v0 = this.v0, v1 = this.v1, v2 = this.v2, v3 = this.v3; point.set( CubicBezier( t, v0.x, v1.x, v2.x, v3.x ), CubicBezier( t, v0.y, v1.y, v2.y, v3.y ), CubicBezier( t, v0.z, v1.z, v2.z, v3.z ) ); return point; } copy( source ) { super.copy( source ); this.v0.copy( source.v0 ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); this.v3.copy( source.v3 ); return this; } toJSON() { const data = super.toJSON(); data.v0 = this.v0.toArray(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); data.v3 = this.v3.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v0.fromArray( json.v0 ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); this.v3.fromArray( json.v3 ); return this; } } /** * A curve representing a 2D line segment. * * @augments Curve */ class LineCurve extends Curve { /** * Constructs a new line curve. * * @param {Vector2} [v1] - The start point. * @param {Vector2} [v2] - The end point. */ constructor( v1 = new Vector2(), v2 = new Vector2() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineCurve = true; this.type = 'LineCurve'; /** * The start point. * * @type {Vector2} */ this.v1 = v1; /** * The end point. * * @type {Vector2} */ this.v2 = v2; } /** * Returns a point on the line. * * @param {number} t - A interpolation factor representing a position on the line. Must be in the range `[0,1]`. * @param {Vector2} [optionalTarget] - The optional target vector the result is written to. 
* @return {Vector2} The position on the line. */ getPoint( t, optionalTarget = new Vector2() ) { const point = optionalTarget; if ( t === 1 ) { point.copy( this.v2 ); } else { point.copy( this.v2 ).sub( this.v1 ); point.multiplyScalar( t ).add( this.v1 ); } return point; } // Line curve is linear, so we can overwrite default getPointAt getPointAt( u, optionalTarget ) { return this.getPoint( u, optionalTarget ); } getTangent( t, optionalTarget = new Vector2() ) { return optionalTarget.subVectors( this.v2, this.v1 ).normalize(); } getTangentAt( u, optionalTarget ) { return this.getTangent( u, optionalTarget ); } copy( source ) { super.copy( source ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); return this; } toJSON() { const data = super.toJSON(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); return this; } } /** * A curve representing a 3D line segment. * * @augments Curve */ class LineCurve3 extends Curve { /** * Constructs a new line curve. * * @param {Vector3} [v1] - The start point. * @param {Vector3} [v2] - The end point. */ constructor( v1 = new Vector3(), v2 = new Vector3() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineCurve3 = true; this.type = 'LineCurve3'; /** * The start point. * * @type {Vector3} */ this.v1 = v1; /** * The end point. * * @type {Vector2} */ this.v2 = v2; } /** * Returns a point on the line. * * @param {number} t - A interpolation factor representing a position on the line. Must be in the range `[0,1]`. * @param {Vector3} [optionalTarget] - The optional target vector the result is written to. * @return {Vector3} The position on the line. */ getPoint( t, optionalTarget = new Vector3() ) { const point = optionalTarget; if ( t === 1 ) { point.copy( this.v2 ); } else { point.copy( this.v2 ).sub( this.v1 ); point.multiplyScalar( t ).add( this.v1 ); } return point; } // Line curve is linear, so we can overwrite default getPointAt getPointAt( u, optionalTarget ) { return this.getPoint( u, optionalTarget ); } getTangent( t, optionalTarget = new Vector3() ) { return optionalTarget.subVectors( this.v2, this.v1 ).normalize(); } getTangentAt( u, optionalTarget ) { return this.getTangent( u, optionalTarget ); } copy( source ) { super.copy( source ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); return this; } toJSON() { const data = super.toJSON(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); return this; } } /** * A curve representing a 2D Quadratic Bezier curve. * * ```js * const curve = new THREE.QuadraticBezierCurve( * new THREE.Vector2( - 10, 0 ), * new THREE.Vector2( 20, 15 ), * new THREE.Vector2( 10, 0 ) * ) * * const points = curve.getPoints( 50 ); * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const material = new THREE.LineBasicMaterial( { color: 0xff0000 } ); * * // Create the final object to add to the scene * const curveObject = new THREE.Line( geometry, material ); * ``` * * @augments Curve */ class QuadraticBezierCurve extends Curve { /** * Constructs a new Quadratic Bezier curve. * * @param {Vector2} [v0] - The start point. * @param {Vector2} [v1] - The control point. * @param {Vector2} [v2] - The end point. 
*/ constructor( v0 = new Vector2(), v1 = new Vector2(), v2 = new Vector2() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isQuadraticBezierCurve = true; this.type = 'QuadraticBezierCurve'; /** * The start point. * * @type {Vector2} */ this.v0 = v0; /** * The control point. * * @type {Vector2} */ this.v1 = v1; /** * The end point. * * @type {Vector2} */ this.v2 = v2; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector2} [optionalTarget] - The optional target vector the result is written to. * @return {Vector2} The position on the curve. */ getPoint( t, optionalTarget = new Vector2() ) { const point = optionalTarget; const v0 = this.v0, v1 = this.v1, v2 = this.v2; point.set( QuadraticBezier( t, v0.x, v1.x, v2.x ), QuadraticBezier( t, v0.y, v1.y, v2.y ) ); return point; } copy( source ) { super.copy( source ); this.v0.copy( source.v0 ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); return this; } toJSON() { const data = super.toJSON(); data.v0 = this.v0.toArray(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v0.fromArray( json.v0 ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); return this; } } /** * A curve representing a 3D Quadratic Bezier curve. * * @augments Curve */ class QuadraticBezierCurve3 extends Curve { /** * Constructs a new Quadratic Bezier curve. * * @param {Vector3} [v0] - The start point. * @param {Vector3} [v1] - The control point. * @param {Vector3} [v2] - The end point. */ constructor( v0 = new Vector3(), v1 = new Vector3(), v2 = new Vector3() ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isQuadraticBezierCurve3 = true; this.type = 'QuadraticBezierCurve3'; /** * The start point. * * @type {Vector3} */ this.v0 = v0; /** * The control point. * * @type {Vector3} */ this.v1 = v1; /** * The end point. * * @type {Vector3} */ this.v2 = v2; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector3} [optionalTarget] - The optional target vector the result is written to. * @return {Vector3} The position on the curve. */ getPoint( t, optionalTarget = new Vector3() ) { const point = optionalTarget; const v0 = this.v0, v1 = this.v1, v2 = this.v2; point.set( QuadraticBezier( t, v0.x, v1.x, v2.x ), QuadraticBezier( t, v0.y, v1.y, v2.y ), QuadraticBezier( t, v0.z, v1.z, v2.z ) ); return point; } copy( source ) { super.copy( source ); this.v0.copy( source.v0 ); this.v1.copy( source.v1 ); this.v2.copy( source.v2 ); return this; } toJSON() { const data = super.toJSON(); data.v0 = this.v0.toArray(); data.v1 = this.v1.toArray(); data.v2 = this.v2.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.v0.fromArray( json.v0 ); this.v1.fromArray( json.v1 ); this.v2.fromArray( json.v2 ); return this; } } /** * A curve representing a 2D spline curve. 
* * ```js * // Create a sine-like wave * const curve = new THREE.SplineCurve( [ * new THREE.Vector2( -10, 0 ), * new THREE.Vector2( -5, 5 ), * new THREE.Vector2( 0, 0 ), * new THREE.Vector2( 5, -5 ), * new THREE.Vector2( 10, 0 ) * ] ); * * const points = curve.getPoints( 50 ); * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * * const material = new THREE.LineBasicMaterial( { color: 0xff0000 } ); * * // Create the final object to add to the scene * const splineObject = new THREE.Line( geometry, material ); * ``` * * @augments Curve */ class SplineCurve extends Curve { /** * Constructs a new 2D spline curve. * * @param {Array} [points] - An array of 2D points defining the curve. */ constructor( points = [] ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSplineCurve = true; this.type = 'SplineCurve'; /** * An array of 2D points defining the curve. * * @type {Array} */ this.points = points; } /** * Returns a point on the curve. * * @param {number} t - A interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {Vector2} [optionalTarget] - The optional target vector the result is written to. * @return {Vector2} The position on the curve. */ getPoint( t, optionalTarget = new Vector2() ) { const point = optionalTarget; const points = this.points; const p = ( points.length - 1 ) * t; const intPoint = Math.floor( p ); const weight = p - intPoint; const p0 = points[ intPoint === 0 ? intPoint : intPoint - 1 ]; const p1 = points[ intPoint ]; const p2 = points[ intPoint > points.length - 2 ? points.length - 1 : intPoint + 1 ]; const p3 = points[ intPoint > points.length - 3 ? points.length - 1 : intPoint + 2 ]; point.set( CatmullRom( weight, p0.x, p1.x, p2.x, p3.x ), CatmullRom( weight, p0.y, p1.y, p2.y, p3.y ) ); return point; } copy( source ) { super.copy( source ); this.points = []; for ( let i = 0, l = source.points.length; i < l; i ++ ) { const point = source.points[ i ]; this.points.push( point.clone() ); } return this; } toJSON() { const data = super.toJSON(); data.points = []; for ( let i = 0, l = this.points.length; i < l; i ++ ) { const point = this.points[ i ]; data.points.push( point.toArray() ); } return data; } fromJSON( json ) { super.fromJSON( json ); this.points = []; for ( let i = 0, l = json.points.length; i < l; i ++ ) { const point = json.points[ i ]; this.points.push( new Vector2().fromArray( point ) ); } return this; } } var Curves = /*#__PURE__*/Object.freeze({ __proto__: null, ArcCurve: ArcCurve, CatmullRomCurve3: CatmullRomCurve3, CubicBezierCurve: CubicBezierCurve, CubicBezierCurve3: CubicBezierCurve3, EllipseCurve: EllipseCurve, LineCurve: LineCurve, LineCurve3: LineCurve3, QuadraticBezierCurve: QuadraticBezierCurve, QuadraticBezierCurve3: QuadraticBezierCurve3, SplineCurve: SplineCurve }); /** * A base class extending {@link Curve}. `CurvePath` is simply an * array of connected curves, but retains the API of a curve. * * @augments Curve */ class CurvePath extends Curve { /** * Constructs a new curve path. */ constructor() { super(); this.type = 'CurvePath'; /** * An array of curves defining the * path. * * @type {Array} */ this.curves = []; /** * Whether the path should automatically be closed * by a line curve. * * @type {boolean} * @default false */ this.autoClose = false; } /** * Adds a curve to this curve path. * * @param {Curve} curve - The curve to add. 
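 *
 * A minimal sketch of composing a curve path from two connected curves and sampling
 * it (the coordinates are arbitrary):
 *
 * ```js
 * const path = new THREE.CurvePath();
 *
 * path.add( new THREE.LineCurve( new THREE.Vector2( 0, 0 ), new THREE.Vector2( 10, 0 ) ) );
 * path.add( new THREE.QuadraticBezierCurve(
 * 	new THREE.Vector2( 10, 0 ),
 * 	new THREE.Vector2( 15, 5 ),
 * 	new THREE.Vector2( 10, 10 )
 * ) );
 *
 * const points = path.getPoints( 12 );
 * ```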
*/ add( curve ) { this.curves.push( curve ); } /** * Adds a line curve to close the path. * * @return {CurvePath} A reference to this curve path. */ closePath() { // Add a line curve if start and end of lines are not connected const startPoint = this.curves[ 0 ].getPoint( 0 ); const endPoint = this.curves[ this.curves.length - 1 ].getPoint( 1 ); if ( ! startPoint.equals( endPoint ) ) { const lineType = ( startPoint.isVector2 === true ) ? 'LineCurve' : 'LineCurve3'; this.curves.push( new Curves[ lineType ]( endPoint, startPoint ) ); } return this; } /** * This method returns a vector in 2D or 3D space (depending on the curve definitions) * for the given interpolation factor. * * @param {number} t - An interpolation factor representing a position on the curve. Must be in the range `[0,1]`. * @param {(Vector2|Vector3)} [optionalTarget] - The optional target vector the result is written to. * @return {?(Vector2|Vector3)} The position on the curve. It can be a 2D or 3D vector depending on the curve definition. */ getPoint( t, optionalTarget ) { // To get accurate point with reference to // entire path distance at time t, // following has to be done: // 1. Length of each sub path has to be known // 2. Locate and identify type of curve // 3. Get t for the curve // 4. Return curve.getPointAt(t') const d = t * this.getLength(); const curveLengths = this.getCurveLengths(); let i = 0; // To think about boundary points. while ( i < curveLengths.length ) { if ( curveLengths[ i ] >= d ) { const diff = curveLengths[ i ] - d; const curve = this.curves[ i ]; const segmentLength = curve.getLength(); const u = segmentLength === 0 ? 0 : 1 - diff / segmentLength; return curve.getPointAt( u, optionalTarget ); } i ++; } return null; // loop where sum != 0, sum > d , sum+1 } /** * Returns the list of cumulative curve lengths of the defined curves. * * @return {Array} The curve lengths. */ getCurveLengths() { // Compute lengths and cache them // We cannot overwrite getLengths() because UtoT mapping uses it. // We use cache values if curves and cache array are same length if ( this.cacheLengths && this.cacheLengths.length === this.curves.length ) { return this.cacheLengths; } // Get length of sub-curve // Push sums into cached array const lengths = []; let sums = 0; for ( let i = 0, l = this.curves.length; i < l; i ++ ) { sums += this.curves[ i ].getLength(); lengths.push( sums ); } this.cacheLengths = lengths; return lengths; } getSpacedPoints( divisions = 40 ) { const points = []; for ( let i = 0; i <= divisions; i ++ ) { points.push( this.getPoint( i / divisions ) ); } if ( this.autoClose ) { points.push( points[ 0 ] ); } return points; } getPoints( divisions = 12 ) { const points = []; let last; for ( let i = 0, curves = this.curves; i < curves.length; i ++ ) { const curve = curves[ i ]; const resolution = curve.isEllipseCurve ? divisions * 2 : ( curve.isLineCurve || curve.isLineCurve3 ) ? 1 : curve.isSplineCurve ? divisions * curve.points.length : divisions; const pts = curve.getPoints( resolution ); for ( let j = 0; j < pts.length; j ++ ) { const point = pts[ j ]; if ( last && last.equals( point ) ) continue; // ensures no consecutive points are duplicates points.push( point ); last = point; } } if ( this.autoClose && points.length > 1 && !
points[ points.length - 1 ].equals( points[ 0 ] ) ) { points.push( points[ 0 ] ); } return points; } copy( source ) { super.copy( source ); this.curves = []; for ( let i = 0, l = source.curves.length; i < l; i ++ ) { const curve = source.curves[ i ]; this.curves.push( curve.clone() ); } this.autoClose = source.autoClose; return this; } toJSON() { const data = super.toJSON(); data.autoClose = this.autoClose; data.curves = []; for ( let i = 0, l = this.curves.length; i < l; i ++ ) { const curve = this.curves[ i ]; data.curves.push( curve.toJSON() ); } return data; } fromJSON( json ) { super.fromJSON( json ); this.autoClose = json.autoClose; this.curves = []; for ( let i = 0, l = json.curves.length; i < l; i ++ ) { const curve = json.curves[ i ]; this.curves.push( new Curves[ curve.type ]().fromJSON( curve ) ); } return this; } } /** * A 2D path representation. The class provides methods for creating paths * and contours of 2D shapes similar to the 2D Canvas API. * * ```js * const path = new THREE.Path(); * * path.lineTo( 0, 0.8 ); * path.quadraticCurveTo( 0, 1, 0.2, 1 ); * path.lineTo( 1, 1 ); * * const points = path.getPoints(); * * const geometry = new THREE.BufferGeometry().setFromPoints( points ); * const material = new THREE.LineBasicMaterial( { color: 0xffffff } ); * * const line = new THREE.Line( geometry, material ); * scene.add( line ); * ``` * * @augments CurvePath */ class Path extends CurvePath { /** * Constructs a new path. * * @param {Array} [points] - An array of 2D points defining the path. */ constructor( points ) { super(); this.type = 'Path'; /** * The current offset of the path. Any new curve added will start here. * * @type {Vector2} */ this.currentPoint = new Vector2(); if ( points ) { this.setFromPoints( points ); } } /** * Creates a path from the given list of points. The points are added * to the path as instances of {@link LineCurve}. * * @param {Array} points - An array of 2D points. * @return {Path} A reference to this path. */ setFromPoints( points ) { this.moveTo( points[ 0 ].x, points[ 0 ].y ); for ( let i = 1, l = points.length; i < l; i ++ ) { this.lineTo( points[ i ].x, points[ i ].y ); } return this; } /** * Moves {@link Path#currentPoint} to the given point. * * @param {number} x - The x coordinate. * @param {number} y - The y coordinate. * @return {Path} A reference to this path. */ moveTo( x, y ) { this.currentPoint.set( x, y ); // TODO consider referencing vectors instead of copying? return this; } /** * Adds an instance of {@link LineCurve} to the path by connecting * the current point with the given one. * * @param {number} x - The x coordinate of the end point. * @param {number} y - The y coordinate of the end point. * @return {Path} A reference to this path. */ lineTo( x, y ) { const curve = new LineCurve( this.currentPoint.clone(), new Vector2( x, y ) ); this.curves.push( curve ); this.currentPoint.set( x, y ); return this; } /** * Adds an instance of {@link QuadraticBezierCurve} to the path by connecting * the current point with the given one. * * @param {number} aCPx - The x coordinate of the control point. * @param {number} aCPy - The y coordinate of the control point. * @param {number} aX - The x coordinate of the end point. * @param {number} aY - The y coordinate of the end point. * @return {Path} A reference to this path. 
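	 *
	 * A minimal usage sketch (the coordinates are arbitrary):
	 *
	 * ```js
	 * const path = new THREE.Path();
	 * path.moveTo( 0, 0 );
	 * path.quadraticCurveTo( 1, 1, 2, 0 ); // control point ( 1, 1 ), end point ( 2, 0 )
	 * ```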
*/ quadraticCurveTo( aCPx, aCPy, aX, aY ) { const curve = new QuadraticBezierCurve( this.currentPoint.clone(), new Vector2( aCPx, aCPy ), new Vector2( aX, aY ) ); this.curves.push( curve ); this.currentPoint.set( aX, aY ); return this; } /** * Adds an instance of {@link CubicBezierCurve} to the path by connecting * the current point with the given one. * * @param {number} aCP1x - The x coordinate of the first control point. * @param {number} aCP1y - The y coordinate of the first control point. * @param {number} aCP2x - The x coordinate of the second control point. * @param {number} aCP2y - The y coordinate of the second control point. * @param {number} aX - The x coordinate of the end point. * @param {number} aY - The y coordinate of the end point. * @return {Path} A reference to this path. */ bezierCurveTo( aCP1x, aCP1y, aCP2x, aCP2y, aX, aY ) { const curve = new CubicBezierCurve( this.currentPoint.clone(), new Vector2( aCP1x, aCP1y ), new Vector2( aCP2x, aCP2y ), new Vector2( aX, aY ) ); this.curves.push( curve ); this.currentPoint.set( aX, aY ); return this; } /** * Adds an instance of {@link SplineCurve} to the path by connecting * the current point with the given list of points. * * @param {Array} pts - An array of points in 2D space. * @return {Path} A reference to this path. */ splineThru( pts ) { const npts = [ this.currentPoint.clone() ].concat( pts ); const curve = new SplineCurve( npts ); this.curves.push( curve ); this.currentPoint.copy( pts[ pts.length - 1 ] ); return this; } /** * Adds an arc as an instance of {@link EllipseCurve} to the path, positioned relative * to the current point. * * @param {number} aX - The x coordinate of the center of the arc offsetted from the previous curve. * @param {number} aY - The y coordinate of the center of the arc offsetted from the previous curve. * @param {number} aRadius - The radius of the arc. * @param {number} aStartAngle - The start angle in radians. * @param {number} aEndAngle - The end angle in radians. * @param {boolean} [aClockwise=false] - Whether to sweep the arc clockwise or not. * @return {Path} A reference to this path. */ arc( aX, aY, aRadius, aStartAngle, aEndAngle, aClockwise ) { const x0 = this.currentPoint.x; const y0 = this.currentPoint.y; this.absarc( aX + x0, aY + y0, aRadius, aStartAngle, aEndAngle, aClockwise ); return this; } /** * Adds an absolutely positioned arc as an instance of {@link EllipseCurve} to the path. * * @param {number} aX - The x coordinate of the center of the arc. * @param {number} aY - The y coordinate of the center of the arc. * @param {number} aRadius - The radius of the arc. * @param {number} aStartAngle - The start angle in radians. * @param {number} aEndAngle - The end angle in radians. * @param {boolean} [aClockwise=false] - Whether to sweep the arc clockwise or not. * @return {Path} A reference to this path. */ absarc( aX, aY, aRadius, aStartAngle, aEndAngle, aClockwise ) { this.absellipse( aX, aY, aRadius, aRadius, aStartAngle, aEndAngle, aClockwise ); return this; } /** * Adds an ellipse as an instance of {@link EllipseCurve} to the path, positioned relative * to the current point * * @param {number} aX - The x coordinate of the center of the ellipse offsetted from the previous curve. * @param {number} aY - The y coordinate of the center of the ellipse offsetted from the previous curve. * @param {number} xRadius - The radius of the ellipse in the x axis. * @param {number} yRadius - The radius of the ellipse in the y axis. * @param {number} aStartAngle - The start angle in radians. 
* @param {number} aEndAngle - The end angle in radians. * @param {boolean} [aClockwise=false] - Whether to sweep the ellipse clockwise or not. * @param {number} [aRotation=0] - The rotation angle of the ellipse in radians, counterclockwise from the positive X axis. * @return {Path} A reference to this path. */ ellipse( aX, aY, xRadius, yRadius, aStartAngle, aEndAngle, aClockwise, aRotation ) { const x0 = this.currentPoint.x; const y0 = this.currentPoint.y; this.absellipse( aX + x0, aY + y0, xRadius, yRadius, aStartAngle, aEndAngle, aClockwise, aRotation ); return this; } /** * Adds an absolutely positioned ellipse as an instance of {@link EllipseCurve} to the path. * * @param {number} aX - The x coordinate of the absolute center of the ellipse. * @param {number} aY - The y coordinate of the absolute center of the ellipse. * @param {number} xRadius - The radius of the ellipse in the x axis. * @param {number} yRadius - The radius of the ellipse in the y axis. * @param {number} aStartAngle - The start angle in radians. * @param {number} aEndAngle - The end angle in radians. * @param {boolean} [aClockwise=false] - Whether to sweep the ellipse clockwise or not. * @param {number} [aRotation=0] - The rotation angle of the ellipse in radians, counterclockwise from the positive X axis. * @return {Path} A reference to this path. */ absellipse( aX, aY, xRadius, yRadius, aStartAngle, aEndAngle, aClockwise, aRotation ) { const curve = new EllipseCurve( aX, aY, xRadius, yRadius, aStartAngle, aEndAngle, aClockwise, aRotation ); if ( this.curves.length > 0 ) { // if a previous curve is present, attempt to join const firstPoint = curve.getPoint( 0 ); if ( ! firstPoint.equals( this.currentPoint ) ) { this.lineTo( firstPoint.x, firstPoint.y ); } } this.curves.push( curve ); const lastPoint = curve.getPoint( 1 ); this.currentPoint.copy( lastPoint ); return this; } copy( source ) { super.copy( source ); this.currentPoint.copy( source.currentPoint ); return this; } toJSON() { const data = super.toJSON(); data.currentPoint = this.currentPoint.toArray(); return data; } fromJSON( json ) { super.fromJSON( json ); this.currentPoint.fromArray( json.currentPoint ); return this; } } /** * Creates meshes with axial symmetry like vases. The lathe rotates around the Y axis. * * ```js * const points = []; * for ( let i = 0; i < 10; i ++ ) { * points.push( new THREE.Vector2( Math.sin( i * 0.2 ) * 10 + 5, ( i - 5 ) * 2 ) ); * } * const geometry = new THREE.LatheGeometry( points ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const lathe = new THREE.Mesh( geometry, material ); * scene.add( lathe ); * ``` * * @augments BufferGeometry */ class LatheGeometry extends BufferGeometry { /** * Constructs a new lathe geometry. * * @param {Array} [points] - An array of points in 2D space. The x-coordinate of each point * must be greater than zero. * @param {number} [segments=12] - The number of circumference segments to generate. * @param {number} [phiStart=0] - The starting angle in radians. * @param {number} [phiLength=Math.PI*2] - The radian (0 to 2PI) range of the lathed section 2PI is a * closed lathe, less than 2PI is a portion. */ constructor( points = [ new Vector2( 0, -0.5 ), new Vector2( 0.5, 0 ), new Vector2( 0, 0.5 ) ], segments = 12, phiStart = 0, phiLength = Math.PI * 2 ) { super(); this.type = 'LatheGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
* * @type {Object} */ this.parameters = { points: points, segments: segments, phiStart: phiStart, phiLength: phiLength }; segments = Math.floor( segments ); // clamp phiLength so it's in range of [ 0, 2PI ] phiLength = clamp( phiLength, 0, Math.PI * 2 ); // buffers const indices = []; const vertices = []; const uvs = []; const initNormals = []; const normals = []; // helper variables const inverseSegments = 1.0 / segments; const vertex = new Vector3(); const uv = new Vector2(); const normal = new Vector3(); const curNormal = new Vector3(); const prevNormal = new Vector3(); let dx = 0; let dy = 0; // pre-compute normals for initial "meridian" for ( let j = 0; j <= ( points.length - 1 ); j ++ ) { switch ( j ) { case 0: // special handling for 1st vertex on path dx = points[ j + 1 ].x - points[ j ].x; dy = points[ j + 1 ].y - points[ j ].y; normal.x = dy * 1.0; normal.y = - dx; normal.z = dy * 0.0; prevNormal.copy( normal ); normal.normalize(); initNormals.push( normal.x, normal.y, normal.z ); break; case ( points.length - 1 ): // special handling for last Vertex on path initNormals.push( prevNormal.x, prevNormal.y, prevNormal.z ); break; default: // default handling for all vertices in between dx = points[ j + 1 ].x - points[ j ].x; dy = points[ j + 1 ].y - points[ j ].y; normal.x = dy * 1.0; normal.y = - dx; normal.z = dy * 0.0; curNormal.copy( normal ); normal.x += prevNormal.x; normal.y += prevNormal.y; normal.z += prevNormal.z; normal.normalize(); initNormals.push( normal.x, normal.y, normal.z ); prevNormal.copy( curNormal ); } } // generate vertices, uvs and normals for ( let i = 0; i <= segments; i ++ ) { const phi = phiStart + i * inverseSegments * phiLength; const sin = Math.sin( phi ); const cos = Math.cos( phi ); for ( let j = 0; j <= ( points.length - 1 ); j ++ ) { // vertex vertex.x = points[ j ].x * sin; vertex.y = points[ j ].y; vertex.z = points[ j ].x * cos; vertices.push( vertex.x, vertex.y, vertex.z ); // uv uv.x = i / segments; uv.y = j / ( points.length - 1 ); uvs.push( uv.x, uv.y ); // normal const x = initNormals[ 3 * j + 0 ] * sin; const y = initNormals[ 3 * j + 1 ]; const z = initNormals[ 3 * j + 0 ] * cos; normals.push( x, y, z ); } } // indices for ( let i = 0; i < segments; i ++ ) { for ( let j = 0; j < ( points.length - 1 ); j ++ ) { const base = j + i * points.length; const a = base; const b = base + points.length; const c = base + points.length + 1; const d = base + 1; // faces indices.push( a, b, d ); indices.push( c, d, b ); } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {LatheGeometry} A new instance. */ static fromJSON( data ) { return new LatheGeometry( data.points, data.segments, data.phiStart, data.phiLength ); } } /** * A geometry class for a capsule with given radii and height. It is constructed using a lathe. 
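 *
 * Internally, the lathed profile is a 2D {@link Path} built from two arcs, mirroring the
 * constructor below (`radius`, `length` and `capSegments` refer to the constructor arguments):
 *
 * ```js
 * const profile = new THREE.Path();
 * profile.absarc( 0, - length / 2, radius, Math.PI * 1.5, 0 );
 * profile.absarc( 0, length / 2, radius, 0, Math.PI * 0.5 );
 * // profile.getPoints( capSegments ) is what gets lathed around the Y axis
 * ```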
* * ```js * const geometry = new THREE.CapsuleGeometry( 1, 1, 4, 8 ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } ); * const capsule = new THREE.Mesh( geometry, material ); * scene.add( capsule ); * ``` * * @augments LatheGeometry */ class CapsuleGeometry extends LatheGeometry { /** * Constructs a new capsule geometry. * * @param {number} [radius=1] - Radius of the capsule. * @param {number} [length=1] - Length of the middle section. * @param {number} [capSegments=4] - Number of curve segments used to build the caps. * @param {number} [radialSegments=8] - Number of segmented faces around the circumference of the capsule. */ constructor( radius = 1, length = 1, capSegments = 4, radialSegments = 8 ) { const path = new Path(); path.absarc( 0, - length / 2, radius, Math.PI * 1.5, 0 ); path.absarc( 0, length / 2, radius, 0, Math.PI * 0.5 ); super( path.getPoints( capSegments ), radialSegments ); this.type = 'CapsuleGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, length: length, capSegments: capSegments, radialSegments: radialSegments, }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {CapsuleGeometry} A new instance. */ static fromJSON( data ) { return new CapsuleGeometry( data.radius, data.length, data.capSegments, data.radialSegments ); } } /** * A simple shape of Euclidean geometry. It is constructed from a * number of triangular segments that are oriented around a central point and * extend as far out as a given radius. It is built counter-clockwise from a * start angle and a given central angle. It can also be used to create * regular polygons, where the number of segments determines the number of * sides. * * ```js * const geometry = new THREE.CircleGeometry( 5, 32 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const circle = new THREE.Mesh( geometry, material ); * scene.add( circle ) * ``` * * @augments BufferGeometry */ class CircleGeometry extends BufferGeometry { /** * Constructs a new circle geometry. * * @param {number} [radius=1] - Radius of the circle. * @param {number} [segments=32] - Number of segments (triangles), minimum = `3`. * @param {number} [thetaStart=0] - Start angle for first segment in radians. * @param {number} [thetaLength=Math.PI*2] - The central angle, often called theta, * of the circular sector in radians. The default value results in a complete circle. */ constructor( radius = 1, segments = 32, thetaStart = 0, thetaLength = Math.PI * 2 ) { super(); this.type = 'CircleGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
* * @type {Object} */ this.parameters = { radius: radius, segments: segments, thetaStart: thetaStart, thetaLength: thetaLength }; segments = Math.max( 3, segments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables const vertex = new Vector3(); const uv = new Vector2(); // center point vertices.push( 0, 0, 0 ); normals.push( 0, 0, 1 ); uvs.push( 0.5, 0.5 ); for ( let s = 0, i = 3; s <= segments; s ++, i += 3 ) { const segment = thetaStart + s / segments * thetaLength; // vertex vertex.x = radius * Math.cos( segment ); vertex.y = radius * Math.sin( segment ); vertices.push( vertex.x, vertex.y, vertex.z ); // normal normals.push( 0, 0, 1 ); // uvs uv.x = ( vertices[ i ] / radius + 1 ) / 2; uv.y = ( vertices[ i + 1 ] / radius + 1 ) / 2; uvs.push( uv.x, uv.y ); } // indices for ( let i = 1; i <= segments; i ++ ) { indices.push( i, i + 1, 0 ); } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {CircleGeometry} A new instance. */ static fromJSON( data ) { return new CircleGeometry( data.radius, data.segments, data.thetaStart, data.thetaLength ); } } /** * A geometry class for representing a cylinder. * * ```js * const geometry = new THREE.CylinderGeometry( 5, 5, 20, 32 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const cylinder = new THREE.Mesh( geometry, material ); * scene.add( cylinder ); * ``` * * @augments BufferGeometry */ class CylinderGeometry extends BufferGeometry { /** * Constructs a new cylinder geometry. * * @param {number} [radiusTop=1] - Radius of the cylinder at the top. * @param {number} [radiusBottom=1] - Radius of the cylinder at the bottom. * @param {number} [height=1] - Height of the cylinder. * @param {number} [radialSegments=32] - Number of segmented faces around the circumference of the cylinder. * @param {number} [heightSegments=1] - Number of rows of faces along the height of the cylinder. * @param {boolean} [openEnded=false] - Whether the base of the cylinder is open or capped. * @param {number} [thetaStart=0] - Start angle for first segment, in radians. * @param {number} [thetaLength=Math.PI*2] - The central angle, often called theta, of the circular sector, in radians. * The default value results in a complete cylinder. */ constructor( radiusTop = 1, radiusBottom = 1, height = 1, radialSegments = 32, heightSegments = 1, openEnded = false, thetaStart = 0, thetaLength = Math.PI * 2 ) { super(); this.type = 'CylinderGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
* * @type {Object} */ this.parameters = { radiusTop: radiusTop, radiusBottom: radiusBottom, height: height, radialSegments: radialSegments, heightSegments: heightSegments, openEnded: openEnded, thetaStart: thetaStart, thetaLength: thetaLength }; const scope = this; radialSegments = Math.floor( radialSegments ); heightSegments = Math.floor( heightSegments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables let index = 0; const indexArray = []; const halfHeight = height / 2; let groupStart = 0; // generate geometry generateTorso(); if ( openEnded === false ) { if ( radiusTop > 0 ) generateCap( true ); if ( radiusBottom > 0 ) generateCap( false ); } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); function generateTorso() { const normal = new Vector3(); const vertex = new Vector3(); let groupCount = 0; // this will be used to calculate the normal const slope = ( radiusBottom - radiusTop ) / height; // generate vertices, normals and uvs for ( let y = 0; y <= heightSegments; y ++ ) { const indexRow = []; const v = y / heightSegments; // calculate the radius of the current row const radius = v * ( radiusBottom - radiusTop ) + radiusTop; for ( let x = 0; x <= radialSegments; x ++ ) { const u = x / radialSegments; const theta = u * thetaLength + thetaStart; const sinTheta = Math.sin( theta ); const cosTheta = Math.cos( theta ); // vertex vertex.x = radius * sinTheta; vertex.y = - v * height + halfHeight; vertex.z = radius * cosTheta; vertices.push( vertex.x, vertex.y, vertex.z ); // normal normal.set( sinTheta, slope, cosTheta ).normalize(); normals.push( normal.x, normal.y, normal.z ); // uv uvs.push( u, 1 - v ); // save index of vertex in respective row indexRow.push( index ++ ); } // now save vertices of the row in our index array indexArray.push( indexRow ); } // generate indices for ( let x = 0; x < radialSegments; x ++ ) { for ( let y = 0; y < heightSegments; y ++ ) { // we use the index array to access the correct indices const a = indexArray[ y ][ x ]; const b = indexArray[ y + 1 ][ x ]; const c = indexArray[ y + 1 ][ x + 1 ]; const d = indexArray[ y ][ x + 1 ]; // faces if ( radiusTop > 0 || y !== 0 ) { indices.push( a, b, d ); groupCount += 3; } if ( radiusBottom > 0 || y !== heightSegments - 1 ) { indices.push( b, c, d ); groupCount += 3; } } } // add a group to the geometry. this will ensure multi material support scope.addGroup( groupStart, groupCount, 0 ); // calculate new start value for groups groupStart += groupCount; } function generateCap( top ) { // save the index of the first center vertex const centerIndexStart = index; const uv = new Vector2(); const vertex = new Vector3(); let groupCount = 0; const radius = ( top === true ) ? radiusTop : radiusBottom; const sign = ( top === true ) ? 1 : -1; // first we generate the center vertex data of the cap. 
// because the geometry needs one set of uvs per face, // we must generate a center vertex per face/segment for ( let x = 1; x <= radialSegments; x ++ ) { // vertex vertices.push( 0, halfHeight * sign, 0 ); // normal normals.push( 0, sign, 0 ); // uv uvs.push( 0.5, 0.5 ); // increase index index ++; } // save the index of the last center vertex const centerIndexEnd = index; // now we generate the surrounding vertices, normals and uvs for ( let x = 0; x <= radialSegments; x ++ ) { const u = x / radialSegments; const theta = u * thetaLength + thetaStart; const cosTheta = Math.cos( theta ); const sinTheta = Math.sin( theta ); // vertex vertex.x = radius * sinTheta; vertex.y = halfHeight * sign; vertex.z = radius * cosTheta; vertices.push( vertex.x, vertex.y, vertex.z ); // normal normals.push( 0, sign, 0 ); // uv uv.x = ( cosTheta * 0.5 ) + 0.5; uv.y = ( sinTheta * 0.5 * sign ) + 0.5; uvs.push( uv.x, uv.y ); // increase index index ++; } // generate indices for ( let x = 0; x < radialSegments; x ++ ) { const c = centerIndexStart + x; const i = centerIndexEnd + x; if ( top === true ) { // face top indices.push( i, i + 1, c ); } else { // face bottom indices.push( i + 1, i, c ); } groupCount += 3; } // add a group to the geometry. this will ensure multi material support scope.addGroup( groupStart, groupCount, top === true ? 1 : 2 ); // calculate new start value for groups groupStart += groupCount; } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {CylinderGeometry} A new instance. */ static fromJSON( data ) { return new CylinderGeometry( data.radiusTop, data.radiusBottom, data.height, data.radialSegments, data.heightSegments, data.openEnded, data.thetaStart, data.thetaLength ); } } /** * A geometry class for representing a cone. * * ```js * const geometry = new THREE.ConeGeometry( 5, 20, 32 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const cone = new THREE.Mesh(geometry, material ); * scene.add( cone ); * ``` * * @augments CylinderGeometry */ class ConeGeometry extends CylinderGeometry { /** * Constructs a new cone geometry. * * @param {number} [radius=1] - Radius of the cone base. * @param {number} [height=1] - Height of the cone. * @param {number} [radialSegments=32] - Number of segmented faces around the circumference of the cone. * @param {number} [heightSegments=1] - Number of rows of faces along the height of the cone. * @param {boolean} [openEnded=false] - Whether the base of the cone is open or capped. * @param {number} [thetaStart=0] - Start angle for first segment, in radians. * @param {number} [thetaLength=Math.PI*2] - The central angle, often called theta, of the circular sector, in radians. * The default value results in a complete cone. */ constructor( radius = 1, height = 1, radialSegments = 32, heightSegments = 1, openEnded = false, thetaStart = 0, thetaLength = Math.PI * 2 ) { super( 0, radius, height, radialSegments, heightSegments, openEnded, thetaStart, thetaLength ); this.type = 'ConeGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
* * @type {Object} */ this.parameters = { radius: radius, height: height, radialSegments: radialSegments, heightSegments: heightSegments, openEnded: openEnded, thetaStart: thetaStart, thetaLength: thetaLength }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {ConeGeometry} A new instance. */ static fromJSON( data ) { return new ConeGeometry( data.radius, data.height, data.radialSegments, data.heightSegments, data.openEnded, data.thetaStart, data.thetaLength ); } } /** * A polyhedron is a solid in three dimensions with flat faces. This class * will take an array of vertices, project them onto a sphere, and then * divide them up to the desired level of detail. * * @augments BufferGeometry */ class PolyhedronGeometry extends BufferGeometry { /** * Constructs a new polyhedron geometry. * * @param {Array} [vertices] - A flat array of vertices describing the base shape. * @param {Array} [indices] - A flat array of indices describing the base shape. * @param {number} [radius=1] - The radius of the shape. * @param {number} [detail=0] - How many levels to subdivide the geometry. The more detail, the smoother the shape. */ constructor( vertices = [], indices = [], radius = 1, detail = 0 ) { super(); this.type = 'PolyhedronGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { vertices: vertices, indices: indices, radius: radius, detail: detail }; // default buffer data const vertexBuffer = []; const uvBuffer = []; // the subdivision creates the vertex buffer data subdivide( detail ); // all vertices should lie on a conceptual sphere with a given radius applyRadius( radius ); // finally, create the uv data generateUVs(); // build non-indexed geometry this.setAttribute( 'position', new Float32BufferAttribute( vertexBuffer, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( vertexBuffer.slice(), 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvBuffer, 2 ) ); if ( detail === 0 ) { this.computeVertexNormals(); // flat normals } else { this.normalizeNormals(); // smooth normals } // helper functions function subdivide( detail ) { const a = new Vector3(); const b = new Vector3(); const c = new Vector3(); // iterate over all faces and apply a subdivision with the given detail value for ( let i = 0; i < indices.length; i += 3 ) { // get the vertices of the face getVertexByIndex( indices[ i + 0 ], a ); getVertexByIndex( indices[ i + 1 ], b ); getVertexByIndex( indices[ i + 2 ], c ); // perform subdivision subdivideFace( a, b, c, detail ); } } function subdivideFace( a, b, c, detail ) { const cols = detail + 1; // we use this multidimensional array as a data structure for creating the subdivision const v = []; // construct all of the vertices for this subdivision for ( let i = 0; i <= cols; i ++ ) { v[ i ] = []; const aj = a.clone().lerp( c, i / cols ); const bj = b.clone().lerp( c, i / cols ); const rows = cols - i; for ( let j = 0; j <= rows; j ++ ) { if ( j === 0 && i === cols ) { v[ i ][ j ] = aj; } else { v[ i ][ j ] = aj.clone().lerp( bj, j / rows ); } } } // construct all of the faces for ( let i = 0; i < cols; i ++ ) { for ( let j = 0; j < 2 * ( cols - i ) - 1; j ++ ) { const k = Math.floor( j / 2 ); if ( j % 2 === 0 ) { pushVertex( v[ i ][ k + 1 ] ); pushVertex( v[ i + 1 ][ k ] ); pushVertex( 
v[ i ][ k ] ); } else { pushVertex( v[ i ][ k + 1 ] ); pushVertex( v[ i + 1 ][ k + 1 ] ); pushVertex( v[ i + 1 ][ k ] ); } } } } function applyRadius( radius ) { const vertex = new Vector3(); // iterate over the entire buffer and apply the radius to each vertex for ( let i = 0; i < vertexBuffer.length; i += 3 ) { vertex.x = vertexBuffer[ i + 0 ]; vertex.y = vertexBuffer[ i + 1 ]; vertex.z = vertexBuffer[ i + 2 ]; vertex.normalize().multiplyScalar( radius ); vertexBuffer[ i + 0 ] = vertex.x; vertexBuffer[ i + 1 ] = vertex.y; vertexBuffer[ i + 2 ] = vertex.z; } } function generateUVs() { const vertex = new Vector3(); for ( let i = 0; i < vertexBuffer.length; i += 3 ) { vertex.x = vertexBuffer[ i + 0 ]; vertex.y = vertexBuffer[ i + 1 ]; vertex.z = vertexBuffer[ i + 2 ]; const u = azimuth( vertex ) / 2 / Math.PI + 0.5; const v = inclination( vertex ) / Math.PI + 0.5; uvBuffer.push( u, 1 - v ); } correctUVs(); correctSeam(); } function correctSeam() { // handle case when face straddles the seam, see #3269 for ( let i = 0; i < uvBuffer.length; i += 6 ) { // uv data of a single face const x0 = uvBuffer[ i + 0 ]; const x1 = uvBuffer[ i + 2 ]; const x2 = uvBuffer[ i + 4 ]; const max = Math.max( x0, x1, x2 ); const min = Math.min( x0, x1, x2 ); // 0.9 is somewhat arbitrary if ( max > 0.9 && min < 0.1 ) { if ( x0 < 0.2 ) uvBuffer[ i + 0 ] += 1; if ( x1 < 0.2 ) uvBuffer[ i + 2 ] += 1; if ( x2 < 0.2 ) uvBuffer[ i + 4 ] += 1; } } } function pushVertex( vertex ) { vertexBuffer.push( vertex.x, vertex.y, vertex.z ); } function getVertexByIndex( index, vertex ) { const stride = index * 3; vertex.x = vertices[ stride + 0 ]; vertex.y = vertices[ stride + 1 ]; vertex.z = vertices[ stride + 2 ]; } function correctUVs() { const a = new Vector3(); const b = new Vector3(); const c = new Vector3(); const centroid = new Vector3(); const uvA = new Vector2(); const uvB = new Vector2(); const uvC = new Vector2(); for ( let i = 0, j = 0; i < vertexBuffer.length; i += 9, j += 6 ) { a.set( vertexBuffer[ i + 0 ], vertexBuffer[ i + 1 ], vertexBuffer[ i + 2 ] ); b.set( vertexBuffer[ i + 3 ], vertexBuffer[ i + 4 ], vertexBuffer[ i + 5 ] ); c.set( vertexBuffer[ i + 6 ], vertexBuffer[ i + 7 ], vertexBuffer[ i + 8 ] ); uvA.set( uvBuffer[ j + 0 ], uvBuffer[ j + 1 ] ); uvB.set( uvBuffer[ j + 2 ], uvBuffer[ j + 3 ] ); uvC.set( uvBuffer[ j + 4 ], uvBuffer[ j + 5 ] ); centroid.copy( a ).add( b ).add( c ).divideScalar( 3 ); const azi = azimuth( centroid ); correctUV( uvA, j + 0, a, azi ); correctUV( uvB, j + 2, b, azi ); correctUV( uvC, j + 4, c, azi ); } } function correctUV( uv, stride, vector, azimuth ) { if ( ( azimuth < 0 ) && ( uv.x === 1 ) ) { uvBuffer[ stride ] = uv.x - 1; } if ( ( vector.x === 0 ) && ( vector.z === 0 ) ) { uvBuffer[ stride ] = azimuth / 2 / Math.PI + 0.5; } } // Angle around the Y axis, counter-clockwise when looking from above. function azimuth( vector ) { return Math.atan2( vector.z, - vector.x ); } // Angle above the XZ plane. function inclination( vector ) { return Math.atan2( - vector.y, Math.sqrt( ( vector.x * vector.x ) + ( vector.z * vector.z ) ) ); } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {PolyhedronGeometry} A new instance. 
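	 *
	 * A minimal sketch of the expected shape of `data` (the vertex/index values below are
	 * the ones used by the built-in tetrahedron):
	 *
	 * ```js
	 * const geometry = THREE.PolyhedronGeometry.fromJSON( {
	 * 	vertices: [ 1, 1, 1, - 1, - 1, 1, - 1, 1, - 1, 1, - 1, - 1 ],
	 * 	indices: [ 2, 1, 0, 0, 3, 2, 1, 3, 0, 2, 3, 1 ],
	 * 	radius: 1,
	 * 	detail: 0
	 * } );
	 * ```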
*/ static fromJSON( data ) { return new PolyhedronGeometry( data.vertices, data.indices, data.radius, data.details ); } } /** * A geometry class for representing a dodecahedron. * * ```js * const geometry = new THREE.DodecahedronGeometry(); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const dodecahedron = new THREE.Mesh( geometry, material ); * scene.add( dodecahedron ); * ``` * * @augments PolyhedronGeometry */ class DodecahedronGeometry extends PolyhedronGeometry { /** * Constructs a new dodecahedron geometry. * * @param {number} [radius=1] - Radius of the dodecahedron. * @param {number} [detail=0] - Setting this to a value greater than `0` adds vertices making it no longer a dodecahedron. */ constructor( radius = 1, detail = 0 ) { const t = ( 1 + Math.sqrt( 5 ) ) / 2; const r = 1 / t; const vertices = [ // (±1, ±1, ±1) -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, -1, 1, 1, 1, // (0, ±1/φ, ±φ) 0, - r, - t, 0, - r, t, 0, r, - t, 0, r, t, // (±1/φ, ±φ, 0) - r, - t, 0, - r, t, 0, r, - t, 0, r, t, 0, // (±φ, 0, ±1/φ) - t, 0, - r, t, 0, - r, - t, 0, r, t, 0, r ]; const indices = [ 3, 11, 7, 3, 7, 15, 3, 15, 13, 7, 19, 17, 7, 17, 6, 7, 6, 15, 17, 4, 8, 17, 8, 10, 17, 10, 6, 8, 0, 16, 8, 16, 2, 8, 2, 10, 0, 12, 1, 0, 1, 18, 0, 18, 16, 6, 10, 2, 6, 2, 13, 6, 13, 15, 2, 16, 18, 2, 18, 3, 2, 3, 13, 18, 1, 9, 18, 9, 11, 18, 11, 3, 4, 14, 12, 4, 12, 0, 4, 0, 8, 11, 9, 5, 11, 5, 19, 11, 19, 7, 19, 5, 14, 19, 14, 4, 19, 4, 17, 1, 12, 14, 1, 14, 5, 1, 5, 9 ]; super( vertices, indices, radius, detail ); this.type = 'DodecahedronGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, detail: detail }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {DodecahedronGeometry} A new instance. */ static fromJSON( data ) { return new DodecahedronGeometry( data.radius, data.detail ); } } const _v0$1 = /*@__PURE__*/ new Vector3(); const _v1$1 = /*@__PURE__*/ new Vector3(); const _normal = /*@__PURE__*/ new Vector3(); const _triangle = /*@__PURE__*/ new Triangle(); /** * Can be used as a helper object to view the edges of a geometry. * * ```js * const geometry = new THREE.BoxGeometry(); * const edges = new THREE.EdgesGeometry( geometry ); * const line = new THREE.LineSegments( edges ); * scene.add( line ); * ``` * * Note: It is not yet possible to serialize/deserialize instances of this class. * * @augments BufferGeometry */ class EdgesGeometry extends BufferGeometry { /** * Constructs a new edges geometry. * * @param {?BufferGeometry} [geometry=null] - The geometry. * @param {number} [thresholdAngle=1] - An edge is only rendered if the angle (in degrees) * between the face normals of the adjoining faces exceeds this value. */ constructor( geometry = null, thresholdAngle = 1 ) { super(); this.type = 'EdgesGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
* * @type {Object} */ this.parameters = { geometry: geometry, thresholdAngle: thresholdAngle }; if ( geometry !== null ) { const precisionPoints = 4; const precision = Math.pow( 10, precisionPoints ); const thresholdDot = Math.cos( DEG2RAD * thresholdAngle ); const indexAttr = geometry.getIndex(); const positionAttr = geometry.getAttribute( 'position' ); const indexCount = indexAttr ? indexAttr.count : positionAttr.count; const indexArr = [ 0, 0, 0 ]; const vertKeys = [ 'a', 'b', 'c' ]; const hashes = new Array( 3 ); const edgeData = {}; const vertices = []; for ( let i = 0; i < indexCount; i += 3 ) { if ( indexAttr ) { indexArr[ 0 ] = indexAttr.getX( i ); indexArr[ 1 ] = indexAttr.getX( i + 1 ); indexArr[ 2 ] = indexAttr.getX( i + 2 ); } else { indexArr[ 0 ] = i; indexArr[ 1 ] = i + 1; indexArr[ 2 ] = i + 2; } const { a, b, c } = _triangle; a.fromBufferAttribute( positionAttr, indexArr[ 0 ] ); b.fromBufferAttribute( positionAttr, indexArr[ 1 ] ); c.fromBufferAttribute( positionAttr, indexArr[ 2 ] ); _triangle.getNormal( _normal ); // create hashes for the edge from the vertices hashes[ 0 ] = `${ Math.round( a.x * precision ) },${ Math.round( a.y * precision ) },${ Math.round( a.z * precision ) }`; hashes[ 1 ] = `${ Math.round( b.x * precision ) },${ Math.round( b.y * precision ) },${ Math.round( b.z * precision ) }`; hashes[ 2 ] = `${ Math.round( c.x * precision ) },${ Math.round( c.y * precision ) },${ Math.round( c.z * precision ) }`; // skip degenerate triangles if ( hashes[ 0 ] === hashes[ 1 ] || hashes[ 1 ] === hashes[ 2 ] || hashes[ 2 ] === hashes[ 0 ] ) { continue; } // iterate over every edge for ( let j = 0; j < 3; j ++ ) { // get the first and next vertex making up the edge const jNext = ( j + 1 ) % 3; const vecHash0 = hashes[ j ]; const vecHash1 = hashes[ jNext ]; const v0 = _triangle[ vertKeys[ j ] ]; const v1 = _triangle[ vertKeys[ jNext ] ]; const hash = `${ vecHash0 }_${ vecHash1 }`; const reverseHash = `${ vecHash1 }_${ vecHash0 }`; if ( reverseHash in edgeData && edgeData[ reverseHash ] ) { // if we found a sibling edge add it into the vertex array if // it meets the angle threshold and delete the edge from the map. if ( _normal.dot( edgeData[ reverseHash ].normal ) <= thresholdDot ) { vertices.push( v0.x, v0.y, v0.z ); vertices.push( v1.x, v1.y, v1.z ); } edgeData[ reverseHash ] = null; } else if ( ! ( hash in edgeData ) ) { // if we've already got an edge here then skip adding a new one edgeData[ hash ] = { index0: indexArr[ j ], index1: indexArr[ jNext ], normal: _normal.clone(), }; } } } // iterate over all remaining, unmatched edges and add them to the vertex array for ( const key in edgeData ) { if ( edgeData[ key ] ) { const { index0, index1 } = edgeData[ key ]; _v0$1.fromBufferAttribute( positionAttr, index0 ); _v1$1.fromBufferAttribute( positionAttr, index1 ); vertices.push( _v0$1.x, _v0$1.y, _v0$1.z ); vertices.push( _v1$1.x, _v1$1.y, _v1$1.z ); } } this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } } /** * Defines an arbitrary 2d shape plane using paths with optional holes. It * can be used with {@link ExtrudeGeometry}, {@link ShapeGeometry}, to get * points, or to get triangulated faces. 
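 *
 * Holes are defined as additional paths in {@link Shape#holes} and must use the opposite
 * winding order to the outer contour; a minimal sketch (the coordinates are arbitrary):
 *
 * ```js
 * const plate = new THREE.Shape()
 * 	.moveTo( 0, 0 ).lineTo( 0, 10 ).lineTo( 10, 10 ).lineTo( 10, 0 ).lineTo( 0, 0 );
 *
 * const hole = new THREE.Path()
 * 	.moveTo( 3, 3 ).lineTo( 7, 3 ).lineTo( 7, 7 ).lineTo( 3, 7 ).lineTo( 3, 3 );
 *
 * plate.holes.push( hole );
 * ```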
* * ```js * const heartShape = new THREE.Shape(); * * heartShape.moveTo( 25, 25 ); * heartShape.bezierCurveTo( 25, 25, 20, 0, 0, 0 ); * heartShape.bezierCurveTo( - 30, 0, - 30, 35, - 30, 35 ); * heartShape.bezierCurveTo( - 30, 55, - 10, 77, 25, 95 ); * heartShape.bezierCurveTo( 60, 77, 80, 55, 80, 35 ); * heartShape.bezierCurveTo( 80, 35, 80, 0, 50, 0 ); * heartShape.bezierCurveTo( 35, 0, 25, 25, 25, 25 ); * * const extrudeSettings = { * depth: 8, * bevelEnabled: true, * bevelSegments: 2, * steps: 2, * bevelSize: 1, * bevelThickness: 1 * }; * * const geometry = new THREE.ExtrudeGeometry( heartShape, extrudeSettings ); * const mesh = new THREE.Mesh( geometry, new THREE.MeshBasicMaterial() ); * ``` * * @augments Path */ class Shape extends Path { /** * Constructs a new shape. * * @param {Array} [points] - An array of 2D points defining the shape. */ constructor( points ) { super( points ); /** * The UUID of the shape. * * @type {string} * @readonly */ this.uuid = generateUUID(); this.type = 'Shape'; /** * Defines the holes in the shape. Hole definitions must use the * opposite winding order (CW/CCW) than the outer shape. * * @type {Array} * @readonly */ this.holes = []; } /** * Returns an array representing each contour of the holes * as a list of 2D points. * * @param {number} divisions - The fineness of the result. * @return {Array>} The holes as a series of 2D points. */ getPointsHoles( divisions ) { const holesPts = []; for ( let i = 0, l = this.holes.length; i < l; i ++ ) { holesPts[ i ] = this.holes[ i ].getPoints( divisions ); } return holesPts; } // get points of shape and holes (keypoints based on segments parameter) /** * Returns an object that holds contour data for the shape and its holes as * arrays of 2D points. * * @param {number} divisions - The fineness of the result. * @return {{shape:Array,holes:Array>}} An object with contour data. */ extractPoints( divisions ) { return { shape: this.getPoints( divisions ), holes: this.getPointsHoles( divisions ) }; } copy( source ) { super.copy( source ); this.holes = []; for ( let i = 0, l = source.holes.length; i < l; i ++ ) { const hole = source.holes[ i ]; this.holes.push( hole.clone() ); } return this; } toJSON() { const data = super.toJSON(); data.uuid = this.uuid; data.holes = []; for ( let i = 0, l = this.holes.length; i < l; i ++ ) { const hole = this.holes[ i ]; data.holes.push( hole.toJSON() ); } return data; } fromJSON( json ) { super.fromJSON( json ); this.uuid = json.uuid; this.holes = []; for ( let i = 0, l = json.holes.length; i < l; i ++ ) { const hole = json.holes[ i ]; this.holes.push( new Path().fromJSON( hole ) ); } return this; } } /* eslint-disable */ // copy of mapbox/earcut version 3.0.1 // https://github.com/mapbox/earcut/tree/v3.0.1 function earcut(data, holeIndices, dim = 2) { const hasHoles = holeIndices && holeIndices.length; const outerLen = hasHoles ? 
holeIndices[0] * dim : data.length; let outerNode = linkedList(data, 0, outerLen, dim, true); const triangles = []; if (!outerNode || outerNode.next === outerNode.prev) return triangles; let minX, minY, invSize; if (hasHoles) outerNode = eliminateHoles(data, holeIndices, outerNode, dim); // if the shape is not too simple, we'll use z-order curve hash later; calculate polygon bbox if (data.length > 80 * dim) { minX = Infinity; minY = Infinity; let maxX = -Infinity; let maxY = -Infinity; for (let i = dim; i < outerLen; i += dim) { const x = data[i]; const y = data[i + 1]; if (x < minX) minX = x; if (y < minY) minY = y; if (x > maxX) maxX = x; if (y > maxY) maxY = y; } // minX, minY and invSize are later used to transform coords into integers for z-order calculation invSize = Math.max(maxX - minX, maxY - minY); invSize = invSize !== 0 ? 32767 / invSize : 0; } earcutLinked(outerNode, triangles, dim, minX, minY, invSize, 0); return triangles; } // create a circular doubly linked list from polygon points in the specified winding order function linkedList(data, start, end, dim, clockwise) { let last; if (clockwise === (signedArea(data, start, end, dim) > 0)) { for (let i = start; i < end; i += dim) last = insertNode(i / dim | 0, data[i], data[i + 1], last); } else { for (let i = end - dim; i >= start; i -= dim) last = insertNode(i / dim | 0, data[i], data[i + 1], last); } if (last && equals(last, last.next)) { removeNode(last); last = last.next; } return last; } // eliminate colinear or duplicate points function filterPoints(start, end) { if (!start) return start; if (!end) end = start; let p = start, again; do { again = false; if (!p.steiner && (equals(p, p.next) || area(p.prev, p, p.next) === 0)) { removeNode(p); p = end = p.prev; if (p === p.next) break; again = true; } else { p = p.next; } } while (again || p !== end); return end; } // main ear slicing loop which triangulates a polygon (given as a linked list) function earcutLinked(ear, triangles, dim, minX, minY, invSize, pass) { if (!ear) return; // interlink polygon nodes in z-order if (!pass && invSize) indexCurve(ear, minX, minY, invSize); let stop = ear; // iterate through ears, slicing them one by one while (ear.prev !== ear.next) { const prev = ear.prev; const next = ear.next; if (invSize ? 
isEarHashed(ear, minX, minY, invSize) : isEar(ear)) { triangles.push(prev.i, ear.i, next.i); // cut off the triangle removeNode(ear); // skipping the next vertex leads to less sliver triangles ear = next.next; stop = next.next; continue; } ear = next; // if we looped through the whole remaining polygon and can't find any more ears if (ear === stop) { // try filtering points and slicing again if (!pass) { earcutLinked(filterPoints(ear), triangles, dim, minX, minY, invSize, 1); // if this didn't work, try curing all small self-intersections locally } else if (pass === 1) { ear = cureLocalIntersections(filterPoints(ear), triangles); earcutLinked(ear, triangles, dim, minX, minY, invSize, 2); // as a last resort, try splitting the remaining polygon into two } else if (pass === 2) { splitEarcut(ear, triangles, dim, minX, minY, invSize); } break; } } } // check whether a polygon node forms a valid ear with adjacent nodes function isEar(ear) { const a = ear.prev, b = ear, c = ear.next; if (area(a, b, c) >= 0) return false; // reflex, can't be an ear // now make sure we don't have other points inside the potential ear const ax = a.x, bx = b.x, cx = c.x, ay = a.y, by = b.y, cy = c.y; // triangle bbox const x0 = Math.min(ax, bx, cx), y0 = Math.min(ay, by, cy), x1 = Math.max(ax, bx, cx), y1 = Math.max(ay, by, cy); let p = c.next; while (p !== a) { if (p.x >= x0 && p.x <= x1 && p.y >= y0 && p.y <= y1 && pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.next; } return true; } function isEarHashed(ear, minX, minY, invSize) { const a = ear.prev, b = ear, c = ear.next; if (area(a, b, c) >= 0) return false; // reflex, can't be an ear const ax = a.x, bx = b.x, cx = c.x, ay = a.y, by = b.y, cy = c.y; // triangle bbox const x0 = Math.min(ax, bx, cx), y0 = Math.min(ay, by, cy), x1 = Math.max(ax, bx, cx), y1 = Math.max(ay, by, cy); // z-order range for the current triangle bbox; const minZ = zOrder(x0, y0, minX, minY, invSize), maxZ = zOrder(x1, y1, minX, minY, invSize); let p = ear.prevZ, n = ear.nextZ; // look for points inside the triangle in both directions while (p && p.z >= minZ && n && n.z <= maxZ) { if (p.x >= x0 && p.x <= x1 && p.y >= y0 && p.y <= y1 && p !== a && p !== c && pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.prevZ; if (n.x >= x0 && n.x <= x1 && n.y >= y0 && n.y <= y1 && n !== a && n !== c && pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, n.x, n.y) && area(n.prev, n, n.next) >= 0) return false; n = n.nextZ; } // look for remaining points in decreasing z-order while (p && p.z >= minZ) { if (p.x >= x0 && p.x <= x1 && p.y >= y0 && p.y <= y1 && p !== a && p !== c && pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.prevZ; } // look for remaining points in increasing z-order while (n && n.z <= maxZ) { if (n.x >= x0 && n.x <= x1 && n.y >= y0 && n.y <= y1 && n !== a && n !== c && pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, n.x, n.y) && area(n.prev, n, n.next) >= 0) return false; n = n.nextZ; } return true; } // go through all polygon nodes and cure small local self-intersections function cureLocalIntersections(start, triangles) { let p = start; do { const a = p.prev, b = p.next.next; if (!equals(a, b) && intersects(a, p, p.next, b) && locallyInside(a, b) && locallyInside(b, a)) { triangles.push(a.i, p.i, b.i); // remove two nodes involved removeNode(p); removeNode(p.next); p = start = b; } p = 
p.next; } while (p !== start); return filterPoints(p); } // try splitting polygon into two and triangulate them independently function splitEarcut(start, triangles, dim, minX, minY, invSize) { // look for a valid diagonal that divides the polygon into two let a = start; do { let b = a.next.next; while (b !== a.prev) { if (a.i !== b.i && isValidDiagonal(a, b)) { // split the polygon in two by the diagonal let c = splitPolygon(a, b); // filter colinear points around the cuts a = filterPoints(a, a.next); c = filterPoints(c, c.next); // run earcut on each half earcutLinked(a, triangles, dim, minX, minY, invSize, 0); earcutLinked(c, triangles, dim, minX, minY, invSize, 0); return; } b = b.next; } a = a.next; } while (a !== start); } // link every hole into the outer loop, producing a single-ring polygon without holes function eliminateHoles(data, holeIndices, outerNode, dim) { const queue = []; for (let i = 0, len = holeIndices.length; i < len; i++) { const start = holeIndices[i] * dim; const end = i < len - 1 ? holeIndices[i + 1] * dim : data.length; const list = linkedList(data, start, end, dim, false); if (list === list.next) list.steiner = true; queue.push(getLeftmost(list)); } queue.sort(compareXYSlope); // process holes from left to right for (let i = 0; i < queue.length; i++) { outerNode = eliminateHole(queue[i], outerNode); } return outerNode; } function compareXYSlope(a, b) { let result = a.x - b.x; // when the left-most point of 2 holes meet at a vertex, sort the holes counterclockwise so that when we find // the bridge to the outer shell is always the point that they meet at. if (result === 0) { result = a.y - b.y; if (result === 0) { const aSlope = (a.next.y - a.y) / (a.next.x - a.x); const bSlope = (b.next.y - b.y) / (b.next.x - b.x); result = aSlope - bSlope; } } return result; } // find a bridge between vertices that connects hole with an outer ring and and link it function eliminateHole(hole, outerNode) { const bridge = findHoleBridge(hole, outerNode); if (!bridge) { return outerNode; } const bridgeReverse = splitPolygon(bridge, hole); // filter collinear points around the cuts filterPoints(bridgeReverse, bridgeReverse.next); return filterPoints(bridge, bridge.next); } // David Eberly's algorithm for finding a bridge between hole and outer polygon function findHoleBridge(hole, outerNode) { let p = outerNode; const hx = hole.x; const hy = hole.y; let qx = -Infinity; let m; // find a segment intersected by a ray from the hole's leftmost point to the left; // segment's endpoint with lesser x will be potential connection point // unless they intersect at a vertex, then choose the vertex if (equals(hole, p)) return p; do { if (equals(hole, p.next)) return p.next; else if (hy <= p.y && hy >= p.next.y && p.next.y !== p.y) { const x = p.x + (hy - p.y) * (p.next.x - p.x) / (p.next.y - p.y); if (x <= hx && x > qx) { qx = x; m = p.x < p.next.x ? p : p.next; if (x === hx) return m; // hole touches outer segment; pick leftmost endpoint } } p = p.next; } while (p !== outerNode); if (!m) return null; // look for points inside the triangle of hole point, segment intersection and endpoint; // if there are no points found, we have a valid connection; // otherwise choose the point of the minimum angle with the ray as connection point const stop = m; const mx = m.x; const my = m.y; let tanMin = Infinity; p = m; do { if (hx >= p.x && p.x >= mx && hx !== p.x && pointInTriangle(hy < my ? hx : qx, hy, mx, my, hy < my ? 
qx : hx, hy, p.x, p.y)) { const tan = Math.abs(hy - p.y) / (hx - p.x); // tangential if (locallyInside(p, hole) && (tan < tanMin || (tan === tanMin && (p.x > m.x || (p.x === m.x && sectorContainsSector(m, p)))))) { m = p; tanMin = tan; } } p = p.next; } while (p !== stop); return m; } // whether sector in vertex m contains sector in vertex p in the same coordinates function sectorContainsSector(m, p) { return area(m.prev, m, p.prev) < 0 && area(p.next, m, m.next) < 0; } // interlink polygon nodes in z-order function indexCurve(start, minX, minY, invSize) { let p = start; do { if (p.z === 0) p.z = zOrder(p.x, p.y, minX, minY, invSize); p.prevZ = p.prev; p.nextZ = p.next; p = p.next; } while (p !== start); p.prevZ.nextZ = null; p.prevZ = null; sortLinked(p); } // Simon Tatham's linked list merge sort algorithm // http://www.chiark.greenend.org.uk/~sgtatham/algorithms/listsort.html function sortLinked(list) { let numMerges; let inSize = 1; do { let p = list; let e; list = null; let tail = null; numMerges = 0; while (p) { numMerges++; let q = p; let pSize = 0; for (let i = 0; i < inSize; i++) { pSize++; q = q.nextZ; if (!q) break; } let qSize = inSize; while (pSize > 0 || (qSize > 0 && q)) { if (pSize !== 0 && (qSize === 0 || !q || p.z <= q.z)) { e = p; p = p.nextZ; pSize--; } else { e = q; q = q.nextZ; qSize--; } if (tail) tail.nextZ = e; else list = e; e.prevZ = tail; tail = e; } p = q; } tail.nextZ = null; inSize *= 2; } while (numMerges > 1); return list; } // z-order of a point given coords and inverse of the longer side of data bbox function zOrder(x, y, minX, minY, invSize) { // coords are transformed into non-negative 15-bit integer range x = (x - minX) * invSize | 0; y = (y - minY) * invSize | 0; x = (x | (x << 8)) & 0x00FF00FF; x = (x | (x << 4)) & 0x0F0F0F0F; x = (x | (x << 2)) & 0x33333333; x = (x | (x << 1)) & 0x55555555; y = (y | (y << 8)) & 0x00FF00FF; y = (y | (y << 4)) & 0x0F0F0F0F; y = (y | (y << 2)) & 0x33333333; y = (y | (y << 1)) & 0x55555555; return x | (y << 1); } // find the leftmost node of a polygon ring function getLeftmost(start) { let p = start, leftmost = start; do { if (p.x < leftmost.x || (p.x === leftmost.x && p.y < leftmost.y)) leftmost = p; p = p.next; } while (p !== start); return leftmost; } // check if a point lies within a convex triangle function pointInTriangle(ax, ay, bx, by, cx, cy, px, py) { return (cx - px) * (ay - py) >= (ax - px) * (cy - py) && (ax - px) * (by - py) >= (bx - px) * (ay - py) && (bx - px) * (cy - py) >= (cx - px) * (by - py); } // check if a point lies within a convex triangle but false if its equal to the first point of the triangle function pointInTriangleExceptFirst(ax, ay, bx, by, cx, cy, px, py) { return !(ax === px && ay === py) && pointInTriangle(ax, ay, bx, by, cx, cy, px, py); } // check if a diagonal between two polygon nodes is valid (lies in polygon interior) function isValidDiagonal(a, b) { return a.next.i !== b.i && a.prev.i !== b.i && !intersectsPolygon(a, b) && // dones't intersect other edges (locallyInside(a, b) && locallyInside(b, a) && middleInside(a, b) && // locally visible (area(a.prev, a, b.prev) || area(a, b.prev, b)) || // does not create opposite-facing sectors equals(a, b) && area(a.prev, a, a.next) > 0 && area(b.prev, b, b.next) > 0); // special zero-length case } // signed area of a triangle function area(p, q, r) { return (q.y - p.y) * (r.x - q.x) - (q.x - p.x) * (r.y - q.y); } // check if two points are equal function equals(p1, p2) { return p1.x === p2.x && p1.y === p2.y; } // check if two segments 
intersect function intersects(p1, q1, p2, q2) { const o1 = sign(area(p1, q1, p2)); const o2 = sign(area(p1, q1, q2)); const o3 = sign(area(p2, q2, p1)); const o4 = sign(area(p2, q2, q1)); if (o1 !== o2 && o3 !== o4) return true; // general case if (o1 === 0 && onSegment(p1, p2, q1)) return true; // p1, q1 and p2 are collinear and p2 lies on p1q1 if (o2 === 0 && onSegment(p1, q2, q1)) return true; // p1, q1 and q2 are collinear and q2 lies on p1q1 if (o3 === 0 && onSegment(p2, p1, q2)) return true; // p2, q2 and p1 are collinear and p1 lies on p2q2 if (o4 === 0 && onSegment(p2, q1, q2)) return true; // p2, q2 and q1 are collinear and q1 lies on p2q2 return false; } // for collinear points p, q, r, check if point q lies on segment pr function onSegment(p, q, r) { return q.x <= Math.max(p.x, r.x) && q.x >= Math.min(p.x, r.x) && q.y <= Math.max(p.y, r.y) && q.y >= Math.min(p.y, r.y); } function sign(num) { return num > 0 ? 1 : num < 0 ? -1 : 0; } // check if a polygon diagonal intersects any polygon segments function intersectsPolygon(a, b) { let p = a; do { if (p.i !== a.i && p.next.i !== a.i && p.i !== b.i && p.next.i !== b.i && intersects(p, p.next, a, b)) return true; p = p.next; } while (p !== a); return false; } // check if a polygon diagonal is locally inside the polygon function locallyInside(a, b) { return area(a.prev, a, a.next) < 0 ? area(a, b, a.next) >= 0 && area(a, a.prev, b) >= 0 : area(a, b, a.prev) < 0 || area(a, a.next, b) < 0; } // check if the middle point of a polygon diagonal is inside the polygon function middleInside(a, b) { let p = a; let inside = false; const px = (a.x + b.x) / 2; const py = (a.y + b.y) / 2; do { if (((p.y > py) !== (p.next.y > py)) && p.next.y !== p.y && (px < (p.next.x - p.x) * (py - p.y) / (p.next.y - p.y) + p.x)) inside = !inside; p = p.next; } while (p !== a); return inside; } // link two polygon vertices with a bridge; if the vertices belong to the same ring, it splits polygon into two; // if one belongs to the outer ring and another to a hole, it merges it into a single ring function splitPolygon(a, b) { const a2 = createNode(a.i, a.x, a.y), b2 = createNode(b.i, b.x, b.y), an = a.next, bp = b.prev; a.next = b; b.prev = a; a2.next = an; an.prev = a2; b2.next = a2; a2.prev = b2; bp.next = b2; b2.prev = bp; return b2; } // create a node and optionally link it with previous one (in a circular doubly linked list) function insertNode(i, x, y, last) { const p = createNode(i, x, y); if (!last) { p.prev = p; p.next = p; } else { p.next = last.next; p.prev = last; last.next.prev = p; last.next = p; } return p; } function removeNode(p) { p.next.prev = p.prev; p.prev.next = p.next; if (p.prevZ) p.prevZ.nextZ = p.nextZ; if (p.nextZ) p.nextZ.prevZ = p.prevZ; } function createNode(i, x, y) { return { i, // vertex index in coordinates array x, y, // vertex coordinates prev: null, // previous and next vertex nodes in a polygon ring next: null, z: 0, // z-order curve value prevZ: null, // previous and next nodes in z-order nextZ: null, steiner: false // indicates whether this is a steiner point }; } function signedArea(data, start, end, dim) { let sum = 0; for (let i = start, j = end - dim; i < end; i += dim) { sum += (data[j] - data[i]) * (data[i + 1] + data[j + 1]); j = i; } return sum; } class Earcut { /** * Triangulates the given shape definition by returning an array of triangles. * * @param {Array} data - An array with 2D points. * @param {Array} holeIndices - An array with indices defining holes. 
* @param {number} [dim=2] - The number of coordinates per vertex in the input array. * @return {Array} An array representing the triangulated faces. Each face is defined by three consecutive numbers * representing vertex indices. */ static triangulate( data, holeIndices, dim = 2 ) { return earcut( data, holeIndices, dim ); } } /** * A class containing utility functions for shapes. * * @hideconstructor */ class ShapeUtils { /** * Calculate area of a ( 2D ) contour polygon. * * @param {Array} contour - An array of 2D points. * @return {number} The area. */ static area( contour ) { const n = contour.length; let a = 0.0; for ( let p = n - 1, q = 0; q < n; p = q ++ ) { a += contour[ p ].x * contour[ q ].y - contour[ q ].x * contour[ p ].y; } return a * 0.5; } /** * Returns `true` if the given contour uses a clockwise winding order. * * @param {Array} pts - An array of 2D points defining a polygon. * @return {boolean} Whether the given contour uses a clockwise winding order or not. */ static isClockWise( pts ) { return ShapeUtils.area( pts ) < 0; } /** * Triangulates the given shape definition. * * @param {Array} contour - An array of 2D points defining the contour. * @param {Array>} holes - An array that holds arrays of 2D points defining the holes. * @return {Array>} An array that holds for each face definition an array with three indices. */ static triangulateShape( contour, holes ) { const vertices = []; // flat array of vertices like [ x0,y0, x1,y1, x2,y2, ... ] const holeIndices = []; // array of hole indices const faces = []; // final array of vertex indices like [ [ a,b,d ], [ b,c,d ] ] removeDupEndPts( contour ); addContour( vertices, contour ); // let holeIndex = contour.length; holes.forEach( removeDupEndPts ); for ( let i = 0; i < holes.length; i ++ ) { holeIndices.push( holeIndex ); holeIndex += holes[ i ].length; addContour( vertices, holes[ i ] ); } // const triangles = Earcut.triangulate( vertices, holeIndices ); // for ( let i = 0; i < triangles.length; i += 3 ) { faces.push( triangles.slice( i, i + 3 ) ); } return faces; } } function removeDupEndPts( points ) { const l = points.length; if ( l > 2 && points[ l - 1 ].equals( points[ 0 ] ) ) { points.pop(); } } function addContour( vertices, contour ) { for ( let i = 0; i < contour.length; i ++ ) { vertices.push( contour[ i ].x ); vertices.push( contour[ i ].y ); } } /** * Creates extruded geometry from a path shape. * * ```js * const length = 12, width = 8; * * const shape = new THREE.Shape(); * shape.moveTo( 0,0 ); * shape.lineTo( 0, width ); * shape.lineTo( length, width ); * shape.lineTo( length, 0 ); * shape.lineTo( 0, 0 ); * * const geometry = new THREE.ExtrudeGeometry( shape ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } ); * const mesh = new THREE.Mesh( geometry, material ) ; * scene.add( mesh ); * ``` * * @augments BufferGeometry */ class ExtrudeGeometry extends BufferGeometry { /** * Constructs a new extrude geometry. * * @param {Shape|Array} [shapes] - A shape or an array of shapes. * @param {ExtrudeGeometry~Options} [options] - The extrude settings. */ constructor( shapes = new Shape( [ new Vector2( 0.5, 0.5 ), new Vector2( -0.5, 0.5 ), new Vector2( -0.5, -0.5 ), new Vector2( 0.5, -0.5 ) ] ), options = {} ) { super(); this.type = 'ExtrudeGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
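*
* For example, the options passed to the constructor are kept here unchanged. A usage
* sketch (`shape` is assumed to be an existing `THREE.Shape`; the option values are arbitrary):
* ```js
* const geometry = new THREE.ExtrudeGeometry( shape, { depth: 2, steps: 4, bevelEnabled: true, bevelThickness: 0.2, bevelSize: 0.1, bevelSegments: 3 } );
* console.log( geometry.parameters.options.depth ); // 2
* ```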
* * @type {Object} */ this.parameters = { shapes: shapes, options: options }; shapes = Array.isArray( shapes ) ? shapes : [ shapes ]; const scope = this; const verticesArray = []; const uvArray = []; for ( let i = 0, l = shapes.length; i < l; i ++ ) { const shape = shapes[ i ]; addShape( shape ); } // build geometry this.setAttribute( 'position', new Float32BufferAttribute( verticesArray, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvArray, 2 ) ); this.computeVertexNormals(); // functions function addShape( shape ) { const placeholder = []; // options const curveSegments = options.curveSegments !== undefined ? options.curveSegments : 12; const steps = options.steps !== undefined ? options.steps : 1; const depth = options.depth !== undefined ? options.depth : 1; let bevelEnabled = options.bevelEnabled !== undefined ? options.bevelEnabled : true; let bevelThickness = options.bevelThickness !== undefined ? options.bevelThickness : 0.2; let bevelSize = options.bevelSize !== undefined ? options.bevelSize : bevelThickness - 0.1; let bevelOffset = options.bevelOffset !== undefined ? options.bevelOffset : 0; let bevelSegments = options.bevelSegments !== undefined ? options.bevelSegments : 3; const extrudePath = options.extrudePath; const uvgen = options.UVGenerator !== undefined ? options.UVGenerator : WorldUVGenerator; // let extrudePts, extrudeByPath = false; let splineTube, binormal, normal, position2; if ( extrudePath ) { extrudePts = extrudePath.getSpacedPoints( steps ); extrudeByPath = true; bevelEnabled = false; // bevels not supported for path extrusion // SETUP TNB variables // TODO1 - have a .isClosed in spline? splineTube = extrudePath.computeFrenetFrames( steps, false ); // console.log(splineTube, 'splineTube', splineTube.normals.length, 'steps', steps, 'extrudePts', extrudePts.length); binormal = new Vector3(); normal = new Vector3(); position2 = new Vector3(); } // Safeguards if bevels are not enabled if ( ! bevelEnabled ) { bevelSegments = 0; bevelThickness = 0; bevelSize = 0; bevelOffset = 0; } // Variables initialization const shapePoints = shape.extractPoints( curveSegments ); let vertices = shapePoints.shape; const holes = shapePoints.holes; const reverse = ! ShapeUtils.isClockWise( vertices ); if ( reverse ) { vertices = vertices.reverse(); // Maybe we should also check if holes are in the opposite direction, just to be safe ... for ( let h = 0, hl = holes.length; h < hl; h ++ ) { const ahole = holes[ h ]; if ( ShapeUtils.isClockWise( ahole ) ) { holes[ h ] = ahole.reverse(); } } } /**Merges index-adjacent points that are within a threshold distance of each other. Array is modified in-place. Threshold distance is empirical, and scaled based on the magnitude of point coordinates. 
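*
* Informal sketch of the scaling (not part of the implementation): with a base threshold
* of 1e-10, two neighbouring points whose coordinates are on the order of 100 are merged
* when they lie closer than about 1e-10 * 100 = 1e-8 units.
* ```js
* const THRESHOLD = 1e-10;
* const magnitude = 100;                       // typical size of the coordinates (arbitrary example value)
* const mergeDistance = THRESHOLD * magnitude; // points closer than ~1e-8 units are merged
* ```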
* @param {Array} points */ function mergeOverlappingPoints( points ) { const THRESHOLD = 1e-10; const THRESHOLD_SQ = THRESHOLD * THRESHOLD; let prevPos = points[ 0 ]; for ( let i = 1; i <= points.length; i ++ ) { const currentIndex = i % points.length; const currentPos = points[ currentIndex ]; const dx = currentPos.x - prevPos.x; const dy = currentPos.y - prevPos.y; const distSq = dx * dx + dy * dy; const scalingFactorSqrt = Math.max( Math.abs( currentPos.x ), Math.abs( currentPos.y ), Math.abs( prevPos.x ), Math.abs( prevPos.y ) ); const thesholdSqScaled = THRESHOLD_SQ * scalingFactorSqrt * scalingFactorSqrt; if ( distSq <= thesholdSqScaled ) { points.splice( currentIndex, 1 ); i --; continue; } prevPos = currentPos; } } mergeOverlappingPoints( vertices ); holes.forEach( mergeOverlappingPoints ); const numHoles = holes.length; /* Vertices */ const contour = vertices; // vertices has all points but contour has only points of circumference for ( let h = 0; h < numHoles; h ++ ) { const ahole = holes[ h ]; vertices = vertices.concat( ahole ); } function scalePt2( pt, vec, size ) { if ( ! vec ) console.error( 'THREE.ExtrudeGeometry: vec does not exist' ); return pt.clone().addScaledVector( vec, size ); } const vlen = vertices.length; // Find directions for point movement function getBevelVec( inPt, inPrev, inNext ) { // computes for inPt the corresponding point inPt' on a new contour // shifted by 1 unit (length of normalized vector) to the left // if we walk along contour clockwise, this new contour is outside the old one // // inPt' is the intersection of the two lines parallel to the two // adjacent edges of inPt at a distance of 1 unit on the left side. let v_trans_x, v_trans_y, shrink_by; // resulting translation vector for inPt // good reading for geometry algorithms (here: line-line intersection) // http://geomalgorithms.com/a05-_intersect-1.html const v_prev_x = inPt.x - inPrev.x, v_prev_y = inPt.y - inPrev.y; const v_next_x = inNext.x - inPt.x, v_next_y = inNext.y - inPt.y; const v_prev_lensq = ( v_prev_x * v_prev_x + v_prev_y * v_prev_y ); // check for collinear edges const collinear0 = ( v_prev_x * v_next_y - v_prev_y * v_next_x ); if ( Math.abs( collinear0 ) > Number.EPSILON ) { // not collinear // length of vectors for normalizing const v_prev_len = Math.sqrt( v_prev_lensq ); const v_next_len = Math.sqrt( v_next_x * v_next_x + v_next_y * v_next_y ); // shift adjacent points by unit vectors to the left const ptPrevShift_x = ( inPrev.x - v_prev_y / v_prev_len ); const ptPrevShift_y = ( inPrev.y + v_prev_x / v_prev_len ); const ptNextShift_x = ( inNext.x - v_next_y / v_next_len ); const ptNextShift_y = ( inNext.y + v_next_x / v_next_len ); // scaling factor for v_prev to intersection point const sf = ( ( ptNextShift_x - ptPrevShift_x ) * v_next_y - ( ptNextShift_y - ptPrevShift_y ) * v_next_x ) / ( v_prev_x * v_next_y - v_prev_y * v_next_x ); // vector from inPt to intersection point v_trans_x = ( ptPrevShift_x + v_prev_x * sf - inPt.x ); v_trans_y = ( ptPrevShift_y + v_prev_y * sf - inPt.y ); // Don't normalize!, otherwise sharp corners become ugly // but prevent crazy spikes const v_trans_lensq = ( v_trans_x * v_trans_x + v_trans_y * v_trans_y ); if ( v_trans_lensq <= 2 ) { return new Vector2( v_trans_x, v_trans_y ); } else { shrink_by = Math.sqrt( v_trans_lensq / 2 ); } } else { // handle special case of collinear edges let direction_eq = false; // assumes: opposite if ( v_prev_x > Number.EPSILON ) { if ( v_next_x > Number.EPSILON ) { direction_eq = true; } } else { if ( v_prev_x 
< - Number.EPSILON ) { if ( v_next_x < - Number.EPSILON ) { direction_eq = true; } } else { if ( Math.sign( v_prev_y ) === Math.sign( v_next_y ) ) { direction_eq = true; } } } if ( direction_eq ) { // console.log("Warning: lines are a straight sequence"); v_trans_x = - v_prev_y; v_trans_y = v_prev_x; shrink_by = Math.sqrt( v_prev_lensq ); } else { // console.log("Warning: lines are a straight spike"); v_trans_x = v_prev_x; v_trans_y = v_prev_y; shrink_by = Math.sqrt( v_prev_lensq / 2 ); } } return new Vector2( v_trans_x / shrink_by, v_trans_y / shrink_by ); } const contourMovements = []; for ( let i = 0, il = contour.length, j = il - 1, k = i + 1; i < il; i ++, j ++, k ++ ) { if ( j === il ) j = 0; if ( k === il ) k = 0; // (j)---(i)---(k) // console.log('i,j,k', i, j , k) contourMovements[ i ] = getBevelVec( contour[ i ], contour[ j ], contour[ k ] ); } const holesMovements = []; let oneHoleMovements, verticesMovements = contourMovements.concat(); for ( let h = 0, hl = numHoles; h < hl; h ++ ) { const ahole = holes[ h ]; oneHoleMovements = []; for ( let i = 0, il = ahole.length, j = il - 1, k = i + 1; i < il; i ++, j ++, k ++ ) { if ( j === il ) j = 0; if ( k === il ) k = 0; // (j)---(i)---(k) oneHoleMovements[ i ] = getBevelVec( ahole[ i ], ahole[ j ], ahole[ k ] ); } holesMovements.push( oneHoleMovements ); verticesMovements = verticesMovements.concat( oneHoleMovements ); } const contractedContourVertices = []; const expandedHoleVertices = []; // Loop bevelSegments, 1 for the front, 1 for the back for ( let b = 0; b < bevelSegments; b ++ ) { //for ( b = bevelSegments; b > 0; b -- ) { const t = b / bevelSegments; const z = bevelThickness * Math.cos( t * Math.PI / 2 ); const bs = bevelSize * Math.sin( t * Math.PI / 2 ) + bevelOffset; // contract shape for ( let i = 0, il = contour.length; i < il; i ++ ) { const vert = scalePt2( contour[ i ], contourMovements[ i ], bs ); v( vert.x, vert.y, - z ); if ( t == 0 ) contractedContourVertices.push( vert ); } // expand holes for ( let h = 0, hl = numHoles; h < hl; h ++ ) { const ahole = holes[ h ]; oneHoleMovements = holesMovements[ h ]; const oneHoleVertices = []; for ( let i = 0, il = ahole.length; i < il; i ++ ) { const vert = scalePt2( ahole[ i ], oneHoleMovements[ i ], bs ); v( vert.x, vert.y, - z ); if ( t == 0 ) oneHoleVertices.push( vert ); } if ( t == 0 ) expandedHoleVertices.push( oneHoleVertices ); } } const faces = ShapeUtils.triangulateShape( contractedContourVertices, expandedHoleVertices ); const flen = faces.length; const bs = bevelSize + bevelOffset; // Back facing vertices for ( let i = 0; i < vlen; i ++ ) { const vert = bevelEnabled ? scalePt2( vertices[ i ], verticesMovements[ i ], bs ) : vertices[ i ]; if ( ! extrudeByPath ) { v( vert.x, vert.y, 0 ); } else { // v( vert.x, vert.y + extrudePts[ 0 ].y, extrudePts[ 0 ].x ); normal.copy( splineTube.normals[ 0 ] ).multiplyScalar( vert.x ); binormal.copy( splineTube.binormals[ 0 ] ).multiplyScalar( vert.y ); position2.copy( extrudePts[ 0 ] ).add( normal ).add( binormal ); v( position2.x, position2.y, position2.z ); } } // Add stepped vertices... // Including front facing vertices for ( let s = 1; s <= steps; s ++ ) { for ( let i = 0; i < vlen; i ++ ) { const vert = bevelEnabled ? scalePt2( vertices[ i ], verticesMovements[ i ], bs ) : vertices[ i ]; if ( ! 
extrudeByPath ) { v( vert.x, vert.y, depth / steps * s ); } else { // v( vert.x, vert.y + extrudePts[ s - 1 ].y, extrudePts[ s - 1 ].x ); normal.copy( splineTube.normals[ s ] ).multiplyScalar( vert.x ); binormal.copy( splineTube.binormals[ s ] ).multiplyScalar( vert.y ); position2.copy( extrudePts[ s ] ).add( normal ).add( binormal ); v( position2.x, position2.y, position2.z ); } } } // Add bevel segments planes //for ( b = 1; b <= bevelSegments; b ++ ) { for ( let b = bevelSegments - 1; b >= 0; b -- ) { const t = b / bevelSegments; const z = bevelThickness * Math.cos( t * Math.PI / 2 ); const bs = bevelSize * Math.sin( t * Math.PI / 2 ) + bevelOffset; // contract shape for ( let i = 0, il = contour.length; i < il; i ++ ) { const vert = scalePt2( contour[ i ], contourMovements[ i ], bs ); v( vert.x, vert.y, depth + z ); } // expand holes for ( let h = 0, hl = holes.length; h < hl; h ++ ) { const ahole = holes[ h ]; oneHoleMovements = holesMovements[ h ]; for ( let i = 0, il = ahole.length; i < il; i ++ ) { const vert = scalePt2( ahole[ i ], oneHoleMovements[ i ], bs ); if ( ! extrudeByPath ) { v( vert.x, vert.y, depth + z ); } else { v( vert.x, vert.y + extrudePts[ steps - 1 ].y, extrudePts[ steps - 1 ].x + z ); } } } } /* Faces */ // Top and bottom faces buildLidFaces(); // Sides faces buildSideFaces(); ///// Internal functions function buildLidFaces() { const start = verticesArray.length / 3; if ( bevelEnabled ) { let layer = 0; // steps + 1 let offset = vlen * layer; // Bottom faces for ( let i = 0; i < flen; i ++ ) { const face = faces[ i ]; f3( face[ 2 ] + offset, face[ 1 ] + offset, face[ 0 ] + offset ); } layer = steps + bevelSegments * 2; offset = vlen * layer; // Top faces for ( let i = 0; i < flen; i ++ ) { const face = faces[ i ]; f3( face[ 0 ] + offset, face[ 1 ] + offset, face[ 2 ] + offset ); } } else { // Bottom faces for ( let i = 0; i < flen; i ++ ) { const face = faces[ i ]; f3( face[ 2 ], face[ 1 ], face[ 0 ] ); } // Top faces for ( let i = 0; i < flen; i ++ ) { const face = faces[ i ]; f3( face[ 0 ] + vlen * steps, face[ 1 ] + vlen * steps, face[ 2 ] + vlen * steps ); } } scope.addGroup( start, verticesArray.length / 3 - start, 0 ); } // Create faces for the z-sides of the shape function buildSideFaces() { const start = verticesArray.length / 3; let layeroffset = 0; sidewalls( contour, layeroffset ); layeroffset += contour.length; for ( let h = 0, hl = holes.length; h < hl; h ++ ) { const ahole = holes[ h ]; sidewalls( ahole, layeroffset ); //, true layeroffset += ahole.length; } scope.addGroup( start, verticesArray.length / 3 - start, 1 ); } function sidewalls( contour, layeroffset ) { let i = contour.length; while ( -- i >= 0 ) { const j = i; let k = i - 1; if ( k < 0 ) k = contour.length - 1; //console.log('b', i,j, i-1, k,vertices.length); for ( let s = 0, sl = ( steps + bevelSegments * 2 ); s < sl; s ++ ) { const slen1 = vlen * s; const slen2 = vlen * ( s + 1 ); const a = layeroffset + j + slen1, b = layeroffset + k + slen1, c = layeroffset + k + slen2, d = layeroffset + j + slen2; f4( a, b, c, d ); } } } function v( x, y, z ) { placeholder.push( x ); placeholder.push( y ); placeholder.push( z ); } function f3( a, b, c ) { addVertex( a ); addVertex( b ); addVertex( c ); const nextIndex = verticesArray.length / 3; const uvs = uvgen.generateTopUV( scope, verticesArray, nextIndex - 3, nextIndex - 2, nextIndex - 1 ); addUV( uvs[ 0 ] ); addUV( uvs[ 1 ] ); addUV( uvs[ 2 ] ); } function f4( a, b, c, d ) { addVertex( a ); addVertex( b ); addVertex( d ); addVertex( b ); 
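// the two calls below, addVertex( c ) and addVertex( d ), finish the second triangle ( b, c, d ); together with ( a, b, d ) above, the quad ( a, b, c, d ) is emitted as two triangles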
addVertex( c ); addVertex( d ); const nextIndex = verticesArray.length / 3; const uvs = uvgen.generateSideWallUV( scope, verticesArray, nextIndex - 6, nextIndex - 3, nextIndex - 2, nextIndex - 1 ); addUV( uvs[ 0 ] ); addUV( uvs[ 1 ] ); addUV( uvs[ 3 ] ); addUV( uvs[ 1 ] ); addUV( uvs[ 2 ] ); addUV( uvs[ 3 ] ); } function addVertex( index ) { verticesArray.push( placeholder[ index * 3 + 0 ] ); verticesArray.push( placeholder[ index * 3 + 1 ] ); verticesArray.push( placeholder[ index * 3 + 2 ] ); } function addUV( vector2 ) { uvArray.push( vector2.x ); uvArray.push( vector2.y ); } } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } toJSON() { const data = super.toJSON(); const shapes = this.parameters.shapes; const options = this.parameters.options; return toJSON$1( shapes, options, data ); } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @param {Array} shapes - An array of shapes. * @return {ExtrudeGeometry} A new instance. */ static fromJSON( data, shapes ) { const geometryShapes = []; for ( let j = 0, jl = data.shapes.length; j < jl; j ++ ) { const shape = shapes[ data.shapes[ j ] ]; geometryShapes.push( shape ); } const extrudePath = data.options.extrudePath; if ( extrudePath !== undefined ) { data.options.extrudePath = new Curves[ extrudePath.type ]().fromJSON( extrudePath ); } return new ExtrudeGeometry( geometryShapes, data.options ); } } const WorldUVGenerator = { generateTopUV: function ( geometry, vertices, indexA, indexB, indexC ) { const a_x = vertices[ indexA * 3 ]; const a_y = vertices[ indexA * 3 + 1 ]; const b_x = vertices[ indexB * 3 ]; const b_y = vertices[ indexB * 3 + 1 ]; const c_x = vertices[ indexC * 3 ]; const c_y = vertices[ indexC * 3 + 1 ]; return [ new Vector2( a_x, a_y ), new Vector2( b_x, b_y ), new Vector2( c_x, c_y ) ]; }, generateSideWallUV: function ( geometry, vertices, indexA, indexB, indexC, indexD ) { const a_x = vertices[ indexA * 3 ]; const a_y = vertices[ indexA * 3 + 1 ]; const a_z = vertices[ indexA * 3 + 2 ]; const b_x = vertices[ indexB * 3 ]; const b_y = vertices[ indexB * 3 + 1 ]; const b_z = vertices[ indexB * 3 + 2 ]; const c_x = vertices[ indexC * 3 ]; const c_y = vertices[ indexC * 3 + 1 ]; const c_z = vertices[ indexC * 3 + 2 ]; const d_x = vertices[ indexD * 3 ]; const d_y = vertices[ indexD * 3 + 1 ]; const d_z = vertices[ indexD * 3 + 2 ]; if ( Math.abs( a_y - b_y ) < Math.abs( a_x - b_x ) ) { return [ new Vector2( a_x, 1 - a_z ), new Vector2( b_x, 1 - b_z ), new Vector2( c_x, 1 - c_z ), new Vector2( d_x, 1 - d_z ) ]; } else { return [ new Vector2( a_y, 1 - a_z ), new Vector2( b_y, 1 - b_z ), new Vector2( c_y, 1 - c_z ), new Vector2( d_y, 1 - d_z ) ]; } } }; function toJSON$1( shapes, options, data ) { data.shapes = []; if ( Array.isArray( shapes ) ) { for ( let i = 0, l = shapes.length; i < l; i ++ ) { const shape = shapes[ i ]; data.shapes.push( shape.uuid ); } } else { data.shapes.push( shapes.uuid ); } data.options = Object.assign( {}, options ); if ( options.extrudePath !== undefined ) data.options.extrudePath = options.extrudePath.toJSON(); return data; } /** * A geometry class for representing an icosahedron. 
* * ```js * const geometry = new THREE.IcosahedronGeometry(); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const icosahedron = new THREE.Mesh( geometry, material ); * scene.add( icosahedron ); * ``` * * @augments PolyhedronGeometry */ class IcosahedronGeometry extends PolyhedronGeometry { /** * Constructs a new icosahedron geometry. * * @param {number} [radius=1] - Radius of the icosahedron. * @param {number} [detail=0] - Setting this to a value greater than `0` adds vertices making it no longer an icosahedron. */ constructor( radius = 1, detail = 0 ) { const t = ( 1 + Math.sqrt( 5 ) ) / 2; const vertices = [ -1, t, 0, 1, t, 0, -1, - t, 0, 1, - t, 0, 0, -1, t, 0, 1, t, 0, -1, - t, 0, 1, - t, t, 0, -1, t, 0, 1, - t, 0, -1, - t, 0, 1 ]; const indices = [ 0, 11, 5, 0, 5, 1, 0, 1, 7, 0, 7, 10, 0, 10, 11, 1, 5, 9, 5, 11, 4, 11, 10, 2, 10, 7, 6, 7, 1, 8, 3, 9, 4, 3, 4, 2, 3, 2, 6, 3, 6, 8, 3, 8, 9, 4, 9, 5, 2, 4, 11, 6, 2, 10, 8, 6, 7, 9, 8, 1 ]; super( vertices, indices, radius, detail ); this.type = 'IcosahedronGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, detail: detail }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {IcosahedronGeometry} A new instance. */ static fromJSON( data ) { return new IcosahedronGeometry( data.radius, data.detail ); } } /** * A geometry class for representing an octahedron. * * ```js * const geometry = new THREE.OctahedronGeometry(); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const octahedron = new THREE.Mesh( geometry, material ); * scene.add( octahedron ); * ``` * * @augments PolyhedronGeometry */ class OctahedronGeometry extends PolyhedronGeometry { /** * Constructs a new octahedron geometry. * * @param {number} [radius=1] - Radius of the octahedron. * @param {number} [detail=0] - Setting this to a value greater than `0` adds vertices making it no longer an octahedron. */ constructor( radius = 1, detail = 0 ) { const vertices = [ 1, 0, 0, -1, 0, 0, 0, 1, 0, 0, -1, 0, 0, 0, 1, 0, 0, -1 ]; const indices = [ 0, 2, 4, 0, 4, 3, 0, 3, 5, 0, 5, 2, 1, 2, 5, 1, 5, 3, 1, 3, 4, 1, 4, 2 ]; super( vertices, indices, radius, detail ); this.type = 'OctahedronGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, detail: detail }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {OctahedronGeometry} A new instance. */ static fromJSON( data ) { return new OctahedronGeometry( data.radius, data.detail ); } } /** * A geometry class for representing a plane. * * ```js * const geometry = new THREE.PlaneGeometry( 1, 1 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00, side: THREE.DoubleSide } ); * const plane = new THREE.Mesh( geometry, material ); * scene.add( plane ); * ``` * * @augments BufferGeometry */ class PlaneGeometry extends BufferGeometry { /** * Constructs a new plane geometry. * * @param {number} [width=1] - The width along the X axis.
* @param {number} [height=1] - The height along the Y axis * @param {number} [widthSegments=1] - The number of segments along the X axis. * @param {number} [heightSegments=1] - The number of segments along the Y axis. */ constructor( width = 1, height = 1, widthSegments = 1, heightSegments = 1 ) { super(); this.type = 'PlaneGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { width: width, height: height, widthSegments: widthSegments, heightSegments: heightSegments }; const width_half = width / 2; const height_half = height / 2; const gridX = Math.floor( widthSegments ); const gridY = Math.floor( heightSegments ); const gridX1 = gridX + 1; const gridY1 = gridY + 1; const segment_width = width / gridX; const segment_height = height / gridY; // const indices = []; const vertices = []; const normals = []; const uvs = []; for ( let iy = 0; iy < gridY1; iy ++ ) { const y = iy * segment_height - height_half; for ( let ix = 0; ix < gridX1; ix ++ ) { const x = ix * segment_width - width_half; vertices.push( x, - y, 0 ); normals.push( 0, 0, 1 ); uvs.push( ix / gridX ); uvs.push( 1 - ( iy / gridY ) ); } } for ( let iy = 0; iy < gridY; iy ++ ) { for ( let ix = 0; ix < gridX; ix ++ ) { const a = ix + gridX1 * iy; const b = ix + gridX1 * ( iy + 1 ); const c = ( ix + 1 ) + gridX1 * ( iy + 1 ); const d = ( ix + 1 ) + gridX1 * iy; indices.push( a, b, d ); indices.push( b, c, d ); } } this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {PlaneGeometry} A new instance. */ static fromJSON( data ) { return new PlaneGeometry( data.width, data.height, data.widthSegments, data.heightSegments ); } } /** * A class for generating a two-dimensional ring geometry. * * ```js * const geometry = new THREE.RingGeometry( 1, 5, 32 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00, side: THREE.DoubleSide } ); * const mesh = new THREE.Mesh( geometry, material ); * scene.add( mesh ); * ``` * * @augments BufferGeometry */ class RingGeometry extends BufferGeometry { /** * Constructs a new ring geometry. * * @param {number} [innerRadius=0.5] - The inner radius of the ring. * @param {number} [outerRadius=1] - The outer radius of the ring. * @param {number} [thetaSegments=32] - Number of segments. A higher number means the ring will be more round. Minimum is `3`. * @param {number} [phiSegments=1] - Number of segments per ring segment. Minimum is `1`. * @param {number} [thetaStart=0] - Starting angle in radians. * @param {number} [thetaLength=Math.PI*2] - Central angle in radians. */ constructor( innerRadius = 0.5, outerRadius = 1, thetaSegments = 32, phiSegments = 1, thetaStart = 0, thetaLength = Math.PI * 2 ) { super(); this.type = 'RingGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
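*
* As a rough sizing note (a sketch, not part of the class): the ring is built as a grid of
* ( phiSegments + 1 ) * ( thetaSegments + 1 ) vertices and phiSegments * thetaSegments * 2 triangles.
* ```js
* const thetaSegments = 32, phiSegments = 1;                       // the default segment counts
* const vertexCount = ( phiSegments + 1 ) * ( thetaSegments + 1 ); // 66
* const triangleCount = phiSegments * thetaSegments * 2;           // 64
* ```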
* * @type {Object} */ this.parameters = { innerRadius: innerRadius, outerRadius: outerRadius, thetaSegments: thetaSegments, phiSegments: phiSegments, thetaStart: thetaStart, thetaLength: thetaLength }; thetaSegments = Math.max( 3, thetaSegments ); phiSegments = Math.max( 1, phiSegments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // some helper variables let radius = innerRadius; const radiusStep = ( ( outerRadius - innerRadius ) / phiSegments ); const vertex = new Vector3(); const uv = new Vector2(); // generate vertices, normals and uvs for ( let j = 0; j <= phiSegments; j ++ ) { for ( let i = 0; i <= thetaSegments; i ++ ) { // values are generate from the inside of the ring to the outside const segment = thetaStart + i / thetaSegments * thetaLength; // vertex vertex.x = radius * Math.cos( segment ); vertex.y = radius * Math.sin( segment ); vertices.push( vertex.x, vertex.y, vertex.z ); // normal normals.push( 0, 0, 1 ); // uv uv.x = ( vertex.x / outerRadius + 1 ) / 2; uv.y = ( vertex.y / outerRadius + 1 ) / 2; uvs.push( uv.x, uv.y ); } // increase the radius for next row of vertices radius += radiusStep; } // indices for ( let j = 0; j < phiSegments; j ++ ) { const thetaSegmentLevel = j * ( thetaSegments + 1 ); for ( let i = 0; i < thetaSegments; i ++ ) { const segment = i + thetaSegmentLevel; const a = segment; const b = segment + thetaSegments + 1; const c = segment + thetaSegments + 2; const d = segment + 1; // faces indices.push( a, b, d ); indices.push( b, c, d ); } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {RingGeometry} A new instance. */ static fromJSON( data ) { return new RingGeometry( data.innerRadius, data.outerRadius, data.thetaSegments, data.phiSegments, data.thetaStart, data.thetaLength ); } } /** * Creates an one-sided polygonal geometry from one or more path shapes. * * ```js * const arcShape = new THREE.Shape() * .moveTo( 5, 1 ) * .absarc( 1, 1, 4, 0, Math.PI * 2, false ); * * const geometry = new THREE.ShapeGeometry( arcShape ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00, side: THREE.DoubleSide } ); * const mesh = new THREE.Mesh( geometry, material ) ; * scene.add( mesh ); * ``` * * @augments BufferGeometry */ class ShapeGeometry extends BufferGeometry { /** * Constructs a new shape geometry. * * @param {Shape|Array} [shapes] - A shape or an array of shapes. * @param {number} [curveSegments=12] - Number of segments per shape. */ constructor( shapes = new Shape( [ new Vector2( 0, 0.5 ), new Vector2( -0.5, -0.5 ), new Vector2( 0.5, -0.5 ) ] ), curveSegments = 12 ) { super(); this.type = 'ShapeGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
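*
* When an array of shapes is passed in, each shape is added as its own group, so an array
* of materials can be used, one per shape. A usage sketch (`shapeA`, `shapeB`, `materialA`
* and `materialB` are assumed to exist):
* ```js
* const geometry = new THREE.ShapeGeometry( [ shapeA, shapeB ] );
* const mesh = new THREE.Mesh( geometry, [ materialA, materialB ] );
* ```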
* * @type {Object} */ this.parameters = { shapes: shapes, curveSegments: curveSegments }; // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables let groupStart = 0; let groupCount = 0; // allow single and array values for "shapes" parameter if ( Array.isArray( shapes ) === false ) { addShape( shapes ); } else { for ( let i = 0; i < shapes.length; i ++ ) { addShape( shapes[ i ] ); this.addGroup( groupStart, groupCount, i ); // enables MultiMaterial support groupStart += groupCount; groupCount = 0; } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); // helper functions function addShape( shape ) { const indexOffset = vertices.length / 3; const points = shape.extractPoints( curveSegments ); let shapeVertices = points.shape; const shapeHoles = points.holes; // check direction of vertices if ( ShapeUtils.isClockWise( shapeVertices ) === false ) { shapeVertices = shapeVertices.reverse(); } for ( let i = 0, l = shapeHoles.length; i < l; i ++ ) { const shapeHole = shapeHoles[ i ]; if ( ShapeUtils.isClockWise( shapeHole ) === true ) { shapeHoles[ i ] = shapeHole.reverse(); } } const faces = ShapeUtils.triangulateShape( shapeVertices, shapeHoles ); // join vertices of inner and outer paths to a single array for ( let i = 0, l = shapeHoles.length; i < l; i ++ ) { const shapeHole = shapeHoles[ i ]; shapeVertices = shapeVertices.concat( shapeHole ); } // vertices, normals, uvs for ( let i = 0, l = shapeVertices.length; i < l; i ++ ) { const vertex = shapeVertices[ i ]; vertices.push( vertex.x, vertex.y, 0 ); normals.push( 0, 0, 1 ); uvs.push( vertex.x, vertex.y ); // world uvs } // indices for ( let i = 0, l = faces.length; i < l; i ++ ) { const face = faces[ i ]; const a = face[ 0 ] + indexOffset; const b = face[ 1 ] + indexOffset; const c = face[ 2 ] + indexOffset; indices.push( a, b, c ); groupCount += 3; } } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } toJSON() { const data = super.toJSON(); const shapes = this.parameters.shapes; return toJSON( shapes, data ); } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @param {Array} shapes - An array of shapes. * @return {ShapeGeometry} A new instance. */ static fromJSON( data, shapes ) { const geometryShapes = []; for ( let j = 0, jl = data.shapes.length; j < jl; j ++ ) { const shape = shapes[ data.shapes[ j ] ]; geometryShapes.push( shape ); } return new ShapeGeometry( geometryShapes, data.curveSegments ); } } function toJSON( shapes, data ) { data.shapes = []; if ( Array.isArray( shapes ) ) { for ( let i = 0, l = shapes.length; i < l; i ++ ) { const shape = shapes[ i ]; data.shapes.push( shape.uuid ); } } else { data.shapes.push( shapes.uuid ); } return data; } /** * A class for generating a sphere geometry. * * ```js * const geometry = new THREE.SphereGeometry( 15, 32, 16 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const sphere = new THREE.Mesh( geometry, material ); * scene.add( sphere ); * ``` * * @augments BufferGeometry */ class SphereGeometry extends BufferGeometry { /** * Constructs a new sphere geometry. * * @param {number} [radius=1] - The sphere radius. 
* @param {number} [widthSegments=32] - The number of horizontal segments. Minimum value is `3`. * @param {number} [heightSegments=16] - The number of vertical segments. Minimum value is `2`. * @param {number} [phiStart=0] - The horizontal starting angle in radians. * @param {number} [phiLength=Math.PI*2] - The horizontal sweep angle size. * @param {number} [thetaStart=0] - The vertical starting angle in radians. * @param {number} [thetaLength=Math.PI] - The vertical sweep angle size. */ constructor( radius = 1, widthSegments = 32, heightSegments = 16, phiStart = 0, phiLength = Math.PI * 2, thetaStart = 0, thetaLength = Math.PI ) { super(); this.type = 'SphereGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, widthSegments: widthSegments, heightSegments: heightSegments, phiStart: phiStart, phiLength: phiLength, thetaStart: thetaStart, thetaLength: thetaLength }; widthSegments = Math.max( 3, Math.floor( widthSegments ) ); heightSegments = Math.max( 2, Math.floor( heightSegments ) ); const thetaEnd = Math.min( thetaStart + thetaLength, Math.PI ); let index = 0; const grid = []; const vertex = new Vector3(); const normal = new Vector3(); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // generate vertices, normals and uvs for ( let iy = 0; iy <= heightSegments; iy ++ ) { const verticesRow = []; const v = iy / heightSegments; // special case for the poles let uOffset = 0; if ( iy === 0 && thetaStart === 0 ) { uOffset = 0.5 / widthSegments; } else if ( iy === heightSegments && thetaEnd === Math.PI ) { uOffset = -0.5 / widthSegments; } for ( let ix = 0; ix <= widthSegments; ix ++ ) { const u = ix / widthSegments; // vertex vertex.x = - radius * Math.cos( phiStart + u * phiLength ) * Math.sin( thetaStart + v * thetaLength ); vertex.y = radius * Math.cos( thetaStart + v * thetaLength ); vertex.z = radius * Math.sin( phiStart + u * phiLength ) * Math.sin( thetaStart + v * thetaLength ); vertices.push( vertex.x, vertex.y, vertex.z ); // normal normal.copy( vertex ).normalize(); normals.push( normal.x, normal.y, normal.z ); // uv uvs.push( u + uOffset, 1 - v ); verticesRow.push( index ++ ); } grid.push( verticesRow ); } // indices for ( let iy = 0; iy < heightSegments; iy ++ ) { for ( let ix = 0; ix < widthSegments; ix ++ ) { const a = grid[ iy ][ ix + 1 ]; const b = grid[ iy ][ ix ]; const c = grid[ iy + 1 ][ ix ]; const d = grid[ iy + 1 ][ ix + 1 ]; if ( iy !== 0 || thetaStart > 0 ) indices.push( a, b, d ); if ( iy !== heightSegments - 1 || thetaEnd < Math.PI ) indices.push( b, c, d ); } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {SphereGeometry} A new instance. 
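*
* A minimal round-trip sketch (assuming the geometry was serialized with `toJSON()`, which
* stores the constructor parameters in the resulting object):
* ```js
* const json = new THREE.SphereGeometry( 2, 16, 8 ).toJSON();
* const restored = THREE.SphereGeometry.fromJSON( json );
* ```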
*/ static fromJSON( data ) { return new SphereGeometry( data.radius, data.widthSegments, data.heightSegments, data.phiStart, data.phiLength, data.thetaStart, data.thetaLength ); } } /** * A geometry class for representing a tetrahedron. * * ```js * const geometry = new THREE.TetrahedronGeometry(); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const tetrahedron = new THREE.Mesh( geometry, material ); * scene.add( tetrahedron ); * ``` * * @augments PolyhedronGeometry */ class TetrahedronGeometry extends PolyhedronGeometry { /** * Constructs a new tetrahedron geometry. * * @param {number} [radius=1] - Radius of the tetrahedron. * @param {number} [detail=0] - Setting this to a value greater than `0` adds vertices making it no longer a tetrahedron. */ constructor( radius = 1, detail = 0 ) { const vertices = [ 1, 1, 1, -1, -1, 1, -1, 1, -1, 1, -1, -1 ]; const indices = [ 2, 1, 0, 0, 3, 2, 1, 3, 0, 2, 3, 1 ]; super( vertices, indices, radius, detail ); this.type = 'TetrahedronGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, detail: detail }; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {TetrahedronGeometry} A new instance. */ static fromJSON( data ) { return new TetrahedronGeometry( data.radius, data.detail ); } } /** * A geometry class for representing a torus. * * ```js * const geometry = new THREE.TorusGeometry( 10, 3, 16, 100 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const torus = new THREE.Mesh( geometry, material ); * scene.add( torus ); * ``` * * @augments BufferGeometry */ class TorusGeometry extends BufferGeometry { /** * Constructs a new torus geometry. * * @param {number} [radius=1] - Radius of the torus, from the center of the torus to the center of the tube. * @param {number} [tube=0.4] - Radius of the tube. Must be smaller than `radius`. * @param {number} [radialSegments=12] - The number of radial segments. * @param {number} [tubularSegments=48] - The number of tubular segments. * @param {number} [arc=Math.PI*2] - Central angle in radians. */ constructor( radius = 1, tube = 0.4, radialSegments = 12, tubularSegments = 48, arc = Math.PI * 2 ) { super(); this.type = 'TorusGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry.
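*
* For example, a half torus can be produced through the `arc` parameter (an illustrative
* sketch with arbitrary values):
* ```js
* const halfTorus = new THREE.TorusGeometry( 1, 0.4, 12, 24, Math.PI );
* ```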
* * @type {Object} */ this.parameters = { radius: radius, tube: tube, radialSegments: radialSegments, tubularSegments: tubularSegments, arc: arc }; radialSegments = Math.floor( radialSegments ); tubularSegments = Math.floor( tubularSegments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables const center = new Vector3(); const vertex = new Vector3(); const normal = new Vector3(); // generate vertices, normals and uvs for ( let j = 0; j <= radialSegments; j ++ ) { for ( let i = 0; i <= tubularSegments; i ++ ) { const u = i / tubularSegments * arc; const v = j / radialSegments * Math.PI * 2; // vertex vertex.x = ( radius + tube * Math.cos( v ) ) * Math.cos( u ); vertex.y = ( radius + tube * Math.cos( v ) ) * Math.sin( u ); vertex.z = tube * Math.sin( v ); vertices.push( vertex.x, vertex.y, vertex.z ); // normal center.x = radius * Math.cos( u ); center.y = radius * Math.sin( u ); normal.subVectors( vertex, center ).normalize(); normals.push( normal.x, normal.y, normal.z ); // uv uvs.push( i / tubularSegments ); uvs.push( j / radialSegments ); } } // generate indices for ( let j = 1; j <= radialSegments; j ++ ) { for ( let i = 1; i <= tubularSegments; i ++ ) { // indices const a = ( tubularSegments + 1 ) * j + i - 1; const b = ( tubularSegments + 1 ) * ( j - 1 ) + i - 1; const c = ( tubularSegments + 1 ) * ( j - 1 ) + i; const d = ( tubularSegments + 1 ) * j + i; // faces indices.push( a, b, d ); indices.push( b, c, d ); } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {TorusGeometry} A new instance. */ static fromJSON( data ) { return new TorusGeometry( data.radius, data.tube, data.radialSegments, data.tubularSegments, data.arc ); } } /** * Creates a torus knot, the particular shape of which is defined by a pair * of coprime integers, p and q. If p and q are not coprime, the result will * be a torus link. * * ```js * const geometry = new THREE.TorusKnotGeometry( 10, 3, 100, 16 ); * const material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); * const torusKnot = new THREE.Mesh( geometry, material ); * scene.add( torusKnot ); * ``` * * @augments BufferGeometry */ class TorusKnotGeometry extends BufferGeometry { /** * Constructs a new torus knot geometry. * * @param {number} [radius=1] - Radius of the torus knot. * @param {number} [tube=0.4] - Radius of the tube. * @param {number} [tubularSegments=64] - The number of tubular segments. * @param {number} [radialSegments=8] - The number of radial segments. * @param {number} [p=2] - This value determines how many times the geometry winds around its axis of rotational symmetry. * @param {number} [q=3] - This value determines how many times the geometry winds around a circle in the interior of the torus. */ constructor( radius = 1, tube = 0.4, tubularSegments = 64, radialSegments = 8, p = 2, q = 3 ) { super(); this.type = 'TorusKnotGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry.
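*
* For example, the default p = 2, q = 3 describes a trefoil-style knot (an illustrative
* sketch with arbitrary radius, tube and segment values):
* ```js
* const trefoil = new THREE.TorusKnotGeometry( 1, 0.3, 128, 16, 2, 3 );
* ```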
Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { radius: radius, tube: tube, tubularSegments: tubularSegments, radialSegments: radialSegments, p: p, q: q }; tubularSegments = Math.floor( tubularSegments ); radialSegments = Math.floor( radialSegments ); // buffers const indices = []; const vertices = []; const normals = []; const uvs = []; // helper variables const vertex = new Vector3(); const normal = new Vector3(); const P1 = new Vector3(); const P2 = new Vector3(); const B = new Vector3(); const T = new Vector3(); const N = new Vector3(); // generate vertices, normals and uvs for ( let i = 0; i <= tubularSegments; ++ i ) { // the radian "u" is used to calculate the position on the torus curve of the current tubular segment const u = i / tubularSegments * p * Math.PI * 2; // now we calculate two points. P1 is our current position on the curve, P2 is a little farther ahead. // these points are used to create a special "coordinate space", which is necessary to calculate the correct vertex positions calculatePositionOnCurve( u, p, q, radius, P1 ); calculatePositionOnCurve( u + 0.01, p, q, radius, P2 ); // calculate orthonormal basis T.subVectors( P2, P1 ); N.addVectors( P2, P1 ); B.crossVectors( T, N ); N.crossVectors( B, T ); // normalize B, N. T can be ignored, we don't use it B.normalize(); N.normalize(); for ( let j = 0; j <= radialSegments; ++ j ) { // now calculate the vertices. they are nothing more than an extrusion of the torus curve. // because we extrude a shape in the xy-plane, there is no need to calculate a z-value. const v = j / radialSegments * Math.PI * 2; const cx = - tube * Math.cos( v ); const cy = tube * Math.sin( v ); // now calculate the final vertex position. // first we orient the extrusion with our basis vectors, then we add it to the current position on the curve vertex.x = P1.x + ( cx * N.x + cy * B.x ); vertex.y = P1.y + ( cx * N.y + cy * B.y ); vertex.z = P1.z + ( cx * N.z + cy * B.z ); vertices.push( vertex.x, vertex.y, vertex.z ); // normal (P1 is always the center/origin of the extrusion, thus we can use it to calculate the normal) normal.subVectors( vertex, P1 ).normalize(); normals.push( normal.x, normal.y, normal.z ); // uv uvs.push( i / tubularSegments ); uvs.push( j / radialSegments ); } } // generate indices for ( let j = 1; j <= tubularSegments; j ++ ) { for ( let i = 1; i <= radialSegments; i ++ ) { // indices const a = ( radialSegments + 1 ) * ( j - 1 ) + ( i - 1 ); const b = ( radialSegments + 1 ) * j + ( i - 1 ); const c = ( radialSegments + 1 ) * j + i; const d = ( radialSegments + 1 ) * ( j - 1 ) + i; // faces indices.push( a, b, d ); indices.push( b, c, d ); } } // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); // this function calculates the current position on the torus curve function calculatePositionOnCurve( u, p, q, radius, position ) { const cu = Math.cos( u ); const su = Math.sin( u ); const quOverP = q / p * u; const cs = Math.cos( quOverP ); position.x = radius * ( 2 + cs ) * 0.5 * cu; position.y = radius * ( 2 + cs ) * su * 0.5; position.z = radius * Math.sin( quOverP ) * 0.5; } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } /** * Factory method for creating an instance of this class from the given * 
JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {TorusKnotGeometry} A new instance. */ static fromJSON( data ) { return new TorusKnotGeometry( data.radius, data.tube, data.tubularSegments, data.radialSegments, data.p, data.q ); } } /** * Creates a tube that extrudes along a 3D curve. * * ```js * class CustomSinCurve extends THREE.Curve { * * getPoint( t, optionalTarget = new THREE.Vector3() ) { * * const tx = t * 3 - 1.5; * const ty = Math.sin( 2 * Math.PI * t ); * const tz = 0; * * return optionalTarget.set( tx, ty, tz ); * } * * } * * const path = new CustomSinCurve( 10 ); * const geometry = new THREE.TubeGeometry( path, 20, 2, 8, false ); * const material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } ); * const mesh = new THREE.Mesh( geometry, material ); * scene.add( mesh ); * ``` * * @augments BufferGeometry */ class TubeGeometry extends BufferGeometry { /** * Constructs a new tube geometry. * * @param {Curve} [path=QuadraticBezierCurve3] - A 3D curve defining the path of the tube. * @param {number} [tubularSegments=64] - The number of segments that make up the tube. * @param {number} [radius=1] -The radius of the tube. * @param {number} [radialSegments=8] - The number of segments that make up the cross-section. * @param {boolean} [closed=false] - Whether the tube is closed or not. */ constructor( path = new QuadraticBezierCurve3( new Vector3( -1, -1, 0 ), new Vector3( -1, 1, 0 ), new Vector3( 1, 1, 0 ) ), tubularSegments = 64, radius = 1, radialSegments = 8, closed = false ) { super(); this.type = 'TubeGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. * * @type {Object} */ this.parameters = { path: path, tubularSegments: tubularSegments, radius: radius, radialSegments: radialSegments, closed: closed }; const frames = path.computeFrenetFrames( tubularSegments, closed ); // expose internals this.tangents = frames.tangents; this.normals = frames.normals; this.binormals = frames.binormals; // helper variables const vertex = new Vector3(); const normal = new Vector3(); const uv = new Vector2(); let P = new Vector3(); // buffer const vertices = []; const normals = []; const uvs = []; const indices = []; // create buffer data generateBufferData(); // build geometry this.setIndex( indices ); this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); this.setAttribute( 'normal', new Float32BufferAttribute( normals, 3 ) ); this.setAttribute( 'uv', new Float32BufferAttribute( uvs, 2 ) ); // functions function generateBufferData() { for ( let i = 0; i < tubularSegments; i ++ ) { generateSegment( i ); } // if the geometry is not closed, generate the last row of vertices and normals // at the regular position on the given path // // if the geometry is closed, duplicate the first row of vertices and normals (uvs will differ) generateSegment( ( closed === false ) ? tubularSegments : 0 ); // uvs are generated in a separate function. 
// this makes it easy compute correct values for closed geometries generateUVs(); // finally create faces generateIndices(); } function generateSegment( i ) { // we use getPointAt to sample evenly distributed points from the given path P = path.getPointAt( i / tubularSegments, P ); // retrieve corresponding normal and binormal const N = frames.normals[ i ]; const B = frames.binormals[ i ]; // generate normals and vertices for the current segment for ( let j = 0; j <= radialSegments; j ++ ) { const v = j / radialSegments * Math.PI * 2; const sin = Math.sin( v ); const cos = - Math.cos( v ); // normal normal.x = ( cos * N.x + sin * B.x ); normal.y = ( cos * N.y + sin * B.y ); normal.z = ( cos * N.z + sin * B.z ); normal.normalize(); normals.push( normal.x, normal.y, normal.z ); // vertex vertex.x = P.x + radius * normal.x; vertex.y = P.y + radius * normal.y; vertex.z = P.z + radius * normal.z; vertices.push( vertex.x, vertex.y, vertex.z ); } } function generateIndices() { for ( let j = 1; j <= tubularSegments; j ++ ) { for ( let i = 1; i <= radialSegments; i ++ ) { const a = ( radialSegments + 1 ) * ( j - 1 ) + ( i - 1 ); const b = ( radialSegments + 1 ) * j + ( i - 1 ); const c = ( radialSegments + 1 ) * j + i; const d = ( radialSegments + 1 ) * ( j - 1 ) + i; // faces indices.push( a, b, d ); indices.push( b, c, d ); } } } function generateUVs() { for ( let i = 0; i <= tubularSegments; i ++ ) { for ( let j = 0; j <= radialSegments; j ++ ) { uv.x = i / tubularSegments; uv.y = j / radialSegments; uvs.push( uv.x, uv.y ); } } } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } toJSON() { const data = super.toJSON(); data.path = this.parameters.path.toJSON(); return data; } /** * Factory method for creating an instance of this class from the given * JSON object. * * @param {Object} data - A JSON object representing the serialized geometry. * @return {TubeGeometry} A new instance. */ static fromJSON( data ) { // This only works for built-in curves (e.g. CatmullRomCurve3). // User defined curves or instances of CurvePath will not be deserialized. return new TubeGeometry( new Curves[ data.path.type ]().fromJSON( data.path ), data.tubularSegments, data.radius, data.radialSegments, data.closed ); } } /** * Can be used as a helper object to visualize a geometry as a wireframe. * * ```js * const geometry = new THREE.SphereGeometry(); * * const wireframe = new THREE.WireframeGeometry( geometry ); * * const line = new THREE.LineSegments( wireframe ); * line.material.depthWrite = false; * line.material.opacity = 0.25; * line.material.transparent = true; * * scene.add( line ); * ``` * * Note: It is not yet possible to serialize/deserialize instances of this class. * * @augments BufferGeometry */ class WireframeGeometry extends BufferGeometry { /** * Constructs a new wireframe geometry. * * @param {?BufferGeometry} [geometry=null] - The geometry. */ constructor( geometry = null ) { super(); this.type = 'WireframeGeometry'; /** * Holds the constructor parameters that have been * used to generate the geometry. Any modification * after instantiation does not change the geometry. 
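*
* The constructor below walks every triangle edge and keeps each edge only once, using a
* set of string keys for both endpoint orders. An informal sketch of that idea (the
* `isNewEdge` helper here is illustrative, not the internal check):
* ```js
* const seen = new Set();
* function isNewEdge( a, b ) {
* 	const key = `${ a.x },${ a.y },${ a.z }-${ b.x },${ b.y },${ b.z }`;
* 	const reverseKey = `${ b.x },${ b.y },${ b.z }-${ a.x },${ a.y },${ a.z }`;
* 	if ( seen.has( key ) || seen.has( reverseKey ) ) return false; // coincident edge, skip it
* 	seen.add( key ); seen.add( reverseKey );
* 	return true;
* }
* ```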
* * @type {Object} */ this.parameters = { geometry: geometry }; if ( geometry !== null ) { // buffer const vertices = []; const edges = new Set(); // helper variables const start = new Vector3(); const end = new Vector3(); if ( geometry.index !== null ) { // indexed BufferGeometry const position = geometry.attributes.position; const indices = geometry.index; let groups = geometry.groups; if ( groups.length === 0 ) { groups = [ { start: 0, count: indices.count, materialIndex: 0 } ]; } // create a data structure that contains all edges without duplicates for ( let o = 0, ol = groups.length; o < ol; ++ o ) { const group = groups[ o ]; const groupStart = group.start; const groupCount = group.count; for ( let i = groupStart, l = ( groupStart + groupCount ); i < l; i += 3 ) { for ( let j = 0; j < 3; j ++ ) { const index1 = indices.getX( i + j ); const index2 = indices.getX( i + ( j + 1 ) % 3 ); start.fromBufferAttribute( position, index1 ); end.fromBufferAttribute( position, index2 ); if ( isUniqueEdge( start, end, edges ) === true ) { vertices.push( start.x, start.y, start.z ); vertices.push( end.x, end.y, end.z ); } } } } } else { // non-indexed BufferGeometry const position = geometry.attributes.position; for ( let i = 0, l = ( position.count / 3 ); i < l; i ++ ) { for ( let j = 0; j < 3; j ++ ) { // three edges per triangle, an edge is represented as (index1, index2) // e.g. the first triangle has the following edges: (0,1),(1,2),(2,0) const index1 = 3 * i + j; const index2 = 3 * i + ( ( j + 1 ) % 3 ); start.fromBufferAttribute( position, index1 ); end.fromBufferAttribute( position, index2 ); if ( isUniqueEdge( start, end, edges ) === true ) { vertices.push( start.x, start.y, start.z ); vertices.push( end.x, end.y, end.z ); } } } } // build geometry this.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); } } copy( source ) { super.copy( source ); this.parameters = Object.assign( {}, source.parameters ); return this; } } function isUniqueEdge( start, end, edges ) { const hash1 = `${start.x},${start.y},${start.z}-${end.x},${end.y},${end.z}`; const hash2 = `${end.x},${end.y},${end.z}-${start.x},${start.y},${start.z}`; // coincident edge if ( edges.has( hash1 ) === true || edges.has( hash2 ) === true ) { return false; } else { edges.add( hash1 ); edges.add( hash2 ); return true; } } var Geometries = /*#__PURE__*/Object.freeze({ __proto__: null, BoxGeometry: BoxGeometry, CapsuleGeometry: CapsuleGeometry, CircleGeometry: CircleGeometry, ConeGeometry: ConeGeometry, CylinderGeometry: CylinderGeometry, DodecahedronGeometry: DodecahedronGeometry, EdgesGeometry: EdgesGeometry, ExtrudeGeometry: ExtrudeGeometry, IcosahedronGeometry: IcosahedronGeometry, LatheGeometry: LatheGeometry, OctahedronGeometry: OctahedronGeometry, PlaneGeometry: PlaneGeometry, PolyhedronGeometry: PolyhedronGeometry, RingGeometry: RingGeometry, ShapeGeometry: ShapeGeometry, SphereGeometry: SphereGeometry, TetrahedronGeometry: TetrahedronGeometry, TorusGeometry: TorusGeometry, TorusKnotGeometry: TorusKnotGeometry, TubeGeometry: TubeGeometry, WireframeGeometry: WireframeGeometry }); /** * This material can receive shadows, but otherwise is completely transparent. 
* * ```js * const geometry = new THREE.PlaneGeometry( 2000, 2000 ); * geometry.rotateX( - Math.PI / 2 ); * * const material = new THREE.ShadowMaterial(); * material.opacity = 0.2; * * const plane = new THREE.Mesh( geometry, material ); * plane.position.y = -200; * plane.receiveShadow = true; * scene.add( plane ); * ``` * * @augments Material */ class ShadowMaterial extends Material { /** * Constructs a new shadow material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isShadowMaterial = true; this.type = 'ShadowMaterial'; /** * Color of the material. * * @type {Color} * @default (0,0,0) */ this.color = new Color( 0x000000 ); /** * Overwritten since shadow materials are transparent * by default. * * @type {boolean} * @default true */ this.transparent = true; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.fog = source.fog; return this; } } /** * This class works just like {@link ShaderMaterial}, except that definitions * of built-in uniforms and attributes are not automatically prepended to the * GLSL shader code. * * `RawShaderMaterial` can only be used with {@link WebGLRenderer}. * * @augments ShaderMaterial */ class RawShaderMaterial extends ShaderMaterial { /** * Constructs a new raw shader material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super( parameters ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isRawShaderMaterial = true; this.type = 'RawShaderMaterial'; } } /** * A standard physically based material, using Metallic-Roughness workflow. * * Physically based rendering (PBR) has recently become the standard in many * 3D applications, such as [Unity]{@link https://blogs.unity3d.com/2014/10/29/physically-based-shading-in-unity-5-a-primer/}, * [Unreal]{@link https://docs.unrealengine.com/latest/INT/Engine/Rendering/Materials/PhysicallyBased/} and * [3D Studio Max]{@link http://area.autodesk.com/blogs/the-3ds-max-blog/what039s-new-for-rendering-in-3ds-max-2017}. * * This approach differs from older approaches in that instead of using * approximations for the way in which light interacts with a surface, a * physically correct model is used. The idea is that, instead of tweaking * materials to look good under specific lighting, a material can be created * that will react 'correctly' under all lighting scenarios. * * In practice this gives a more accurate and realistic looking result than * the {@link MeshLambertMaterial} or {@link MeshPhongMaterial}, at the cost of * being somewhat more computationally expensive. `MeshStandardMaterial` uses per-fragment * shading. * * Note that for best results you should always specify an environment map when using this material. 
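*
* A minimal usage sketch (the environment texture is assumed to be created elsewhere, for
* example with {@link PMREMGenerator}; `scene` is assumed to exist):
* ```js
* const material = new THREE.MeshStandardMaterial( {
* 	color: 0x049ef4,
* 	metalness: 1.0,
* 	roughness: 0.3,
* 	envMap: environmentTexture // assumed pre-processed environment map
* } );
* const mesh = new THREE.Mesh( new THREE.SphereGeometry(), material );
* scene.add( mesh );
* ```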
* * For a non-technical introduction to the concept of PBR and how to set up a * PBR material, check out these articles by the people at [marmoset]{@link https://www.marmoset.co}: * * - [Basic Theory of Physically Based Rendering]{@link https://www.marmoset.co/posts/basic-theory-of-physically-based-rendering/} * - [Physically Based Rendering and You Can Too]{@link https://www.marmoset.co/posts/physically-based-rendering-and-you-can-too/} * * Technical details of the approach used in three.js (and most other PBR systems) can be found is this * [paper from Disney]{@link https://media.disneyanimation.com/uploads/production/publication_asset/48/asset/s2012_pbs_disney_brdf_notes_v3.pdf} * (pdf), by Brent Burley. * * @augments Material */ class MeshStandardMaterial extends Material { /** * Constructs a new mesh standard material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshStandardMaterial = true; this.type = 'MeshStandardMaterial'; this.defines = { 'STANDARD': '' }; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); // diffuse /** * How rough the material appears. `0.0` means a smooth mirror reflection, `1.0` * means fully diffuse. If `roughnessMap` is also provided, * both values are multiplied. * * @type {number} * @default 1 */ this.roughness = 1.0; /** * How much the material is like a metal. Non-metallic materials such as wood * or stone use `0.0`, metallic use `1.0`, with nothing (usually) in between. * A value between `0.0` and `1.0` could be used for a rusty metal look. * If `metalnessMap` is also provided, both values are multiplied. * * @type {number} * @default 0 */ this.metalness = 0.0; /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The light map. Requires a second set of UVs. * * @type {?Texture} * @default null */ this.lightMap = null; /** * Intensity of the baked light. * * @type {number} * @default 1 */ this.lightMapIntensity = 1.0; /** * The red channel of this texture is used as the ambient occlusion map. * Requires a second set of UVs. * * @type {?Texture} * @default null */ this.aoMap = null; /** * Intensity of the ambient occlusion effect. Range is `[0,1]`, where `0` * disables ambient occlusion. Where intensity is `1` and the AO map's * red channel is also `1`, ambient light is fully occluded on a surface. * * @type {number} * @default 1 */ this.aoMapIntensity = 1.0; /** * Emissive (light) color of the material, essentially a solid color * unaffected by other lighting. * * @type {Color} * @default (0,0,0) */ this.emissive = new Color( 0x000000 ); /** * Intensity of the emissive light. Modulates the emissive color. * * @type {number} * @default 1 */ this.emissiveIntensity = 1.0; /** * Set emissive (glow) map. The emissive map color is modulated by the * emissive color and the emissive intensity. If you have an emissive map, * be sure to set the emissive color to something other than black. 
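 *
 * For example (a sketch; `glowTexture` is an assumed, already loaded texture and
 * `material` an instance of this class):
 *
 * ```js
 * material.emissiveMap = glowTexture; // assumed texture
 * material.emissive.set( 0xffffff ); // must not stay black, otherwise the map has no visible effect
 * ```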
* * @type {?Texture} * @default null */ this.emissiveMap = null; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. * * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * The green channel of this texture is used to alter the roughness of the * material. * * @type {?Texture} * @default null */ this.roughnessMap = null; /** * The blue channel of this texture is used to alter the metalness of the * material. * * @type {?Texture} * @default null */ this.metalnessMap = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The environment map. To ensure a physically correct rendering, environment maps * are internally pre-processed with {@link PMREMGenerator}. * * @type {?Texture} * @default null */ this.envMap = null; /** * The rotation of the environment map in radians. 
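 *
 * For example, a sketch of rotating the environment a quarter turn around the vertical axis:
 *
 * ```js
 * material.envMapRotation.set( 0, Math.PI / 2, 0 ); // Euler angles in radians
 * ```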
* * @type {Euler} * @default (0,0,0) */ this.envMapRotation = new Euler(); /** * Scales the effect of the environment map by multiplying its color. * * @type {number} * @default 1 */ this.envMapIntensity = 1.0; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * Can only be used with {@link SVGRenderer}. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines appearance of wireframe ends. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinecap = 'round'; /** * Defines appearance of wireframe joints. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinejoin = 'round'; /** * Whether the material is rendered with flat shading or not. * * @type {boolean} * @default false */ this.flatShading = false; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.defines = { 'STANDARD': '' }; this.color.copy( source.color ); this.roughness = source.roughness; this.metalness = source.metalness; this.map = source.map; this.lightMap = source.lightMap; this.lightMapIntensity = source.lightMapIntensity; this.aoMap = source.aoMap; this.aoMapIntensity = source.aoMapIntensity; this.emissive.copy( source.emissive ); this.emissiveMap = source.emissiveMap; this.emissiveIntensity = source.emissiveIntensity; this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.roughnessMap = source.roughnessMap; this.metalnessMap = source.metalnessMap; this.alphaMap = source.alphaMap; this.envMap = source.envMap; this.envMapRotation.copy( source.envMapRotation ); this.envMapIntensity = source.envMapIntensity; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.wireframeLinecap = source.wireframeLinecap; this.wireframeLinejoin = source.wireframeLinejoin; this.flatShading = source.flatShading; this.fog = source.fog; return this; } } /** * An extension of the {@link MeshStandardMaterial}, providing more advanced * physically-based rendering properties: * * - Anisotropy: Ability to represent the anisotropic property of materials * as observable with brushed metals. * - Clearcoat: Some materials — like car paints, carbon fiber, and wet surfaces — require * a clear, reflective layer on top of another layer that may be irregular or rough. * Clearcoat approximates this effect, without the need for a separate transparent surface. * - Iridescence: Allows to render the effect where hue varies depending on the viewing * angle and illumination angle. This can be seen on soap bubbles, oil films, or on the * wings of many insects. * - Physically-based transparency: One limitation of {@link Material#opacity} is that highly * transparent materials are less reflective. Physically-based transmission provides a more * realistic option for thin, transparent surfaces like glass. * - Advanced reflectivity: More flexible reflectivity for non-metallic materials. * - Sheen: Can be used for representing cloth and fabric materials. 
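 *
 * A brief sketch of enabling a few of these features (values are illustrative, not defaults):
 *
 * ```js
 * const material = new THREE.MeshPhysicalMaterial( {
 * 	color: 0xff0000,
 * 	metalness: 0.0,
 * 	roughness: 0.3,
 * 	clearcoat: 1.0,
 * 	clearcoatRoughness: 0.1,
 * 	sheen: 0.5,
 * 	sheenColor: 0x2222ff
 * } );
 * ```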
* * As a result of these complex shading features, `MeshPhysicalMaterial` has a * higher performance cost, per pixel, than other three.js materials. Most * effects are disabled by default, and add cost as they are enabled. For * best results, always specify an environment map when using this material. * * @augments MeshStandardMaterial */ class MeshPhysicalMaterial extends MeshStandardMaterial { /** * Constructs a new mesh physical material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshPhysicalMaterial = true; this.defines = { 'STANDARD': '', 'PHYSICAL': '' }; this.type = 'MeshPhysicalMaterial'; /** * The rotation of the anisotropy in tangent, bitangent space, measured in radians * counter-clockwise from the tangent. When `anisotropyMap` is present, this * property provides additional rotation to the vectors in the texture. * * @type {number} * @default 1 */ this.anisotropyRotation = 0; /** * Red and green channels represent the anisotropy direction in `[-1, 1]` tangent, * bitangent space, to be rotated by `anisotropyRotation`. The blue channel * contains strength as `[0, 1]` to be multiplied by `anisotropy`. * * @type {?Texture} * @default null */ this.anisotropyMap = null; /** * The red channel of this texture is multiplied against `clearcoat`, * for per-pixel control over a coating's intensity. * * @type {?Texture} * @default null */ this.clearcoatMap = null; /** * Roughness of the clear coat layer, from `0.0` to `1.0`. * * @type {number} * @default 0 */ this.clearcoatRoughness = 0.0; /** * The green channel of this texture is multiplied against * `clearcoatRoughness`, for per-pixel control over a coating's roughness. * * @type {?Texture} * @default null */ this.clearcoatRoughnessMap = null; /** * How much `clearcoatNormalMap` affects the clear coat layer, from * `(0,0)` to `(1,1)`. * * @type {Vector2} * @default (1,1) */ this.clearcoatNormalScale = new Vector2( 1, 1 ); /** * Can be used to enable independent normals for the clear coat layer. * * @type {?Texture} * @default null */ this.clearcoatNormalMap = null; /** * Index-of-refraction for non-metallic materials, from `1.0` to `2.333`. * * @type {number} * @default 1.5 */ this.ior = 1.5; /** * Degree of reflectivity, from `0.0` to `1.0`. Default is `0.5`, which * corresponds to an index-of-refraction of `1.5`. * * This models the reflectivity of non-metallic materials. It has no effect * when `metalness` is `1.0` * * @name MeshPhysicalMaterial#reflectivity * @type {number} * @default 0.5 */ Object.defineProperty( this, 'reflectivity', { get: function () { return ( clamp( 2.5 * ( this.ior - 1 ) / ( this.ior + 1 ), 0, 1 ) ); }, set: function ( reflectivity ) { this.ior = ( 1 + 0.4 * reflectivity ) / ( 1 - 0.4 * reflectivity ); } } ); /** * The red channel of this texture is multiplied against `iridescence`, for per-pixel * control over iridescence. * * @type {?Texture} * @default null */ this.iridescenceMap = null; /** * Strength of the iridescence RGB color shift effect, represented by an index-of-refraction. * Between `1.0` to `2.333`. 
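 *
 * A soap-bubble-like setup might look as follows (values are illustrative), given a
 * `MeshPhysicalMaterial` instance `material`:
 *
 * ```js
 * material.iridescence = 1.0;
 * material.iridescenceIOR = 1.3;
 * material.iridescenceThicknessRange = [ 100, 800 ];
 * ```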
* * @type {number} * @default 1.3 */ this.iridescenceIOR = 1.3; /** *Array of exactly 2 elements, specifying minimum and maximum thickness of the iridescence layer. Thickness of iridescence layer has an equivalent effect of the one `thickness` has on `ior`. * * @type {Array} * @default [100,400] */ this.iridescenceThicknessRange = [ 100, 400 ]; /** * A texture that defines the thickness of the iridescence layer, stored in the green channel. * Minimum and maximum values of thickness are defined by `iridescenceThicknessRange` array: * - `0.0` in the green channel will result in thickness equal to first element of the array. * - `1.0` in the green channel will result in thickness equal to second element of the array. * - Values in-between will linearly interpolate between the elements of the array. * * @type {?Texture} * @default null */ this.iridescenceThicknessMap = null; /** * The sheen tint. * * @type {Color} * @default (0,0,0) */ this.sheenColor = new Color( 0x000000 ); /** * The RGB channels of this texture are multiplied against `sheenColor`, for per-pixel control * over sheen tint. * * @type {?Texture} * @default null */ this.sheenColorMap = null; /** * Roughness of the sheen layer, from `0.0` to `1.0`. * * @type {number} * @default 1 */ this.sheenRoughness = 1.0; /** * The alpha channel of this texture is multiplied against `sheenRoughness`, for per-pixel control * over sheen roughness. * * @type {?Texture} * @default null */ this.sheenRoughnessMap = null; /** * The red channel of this texture is multiplied against `transmission`, for per-pixel control over * optical transparency. * * @type {?Texture} * @default null */ this.transmissionMap = null; /** * The thickness of the volume beneath the surface. The value is given in the * coordinate space of the mesh. If the value is `0` the material is * thin-walled. Otherwise the material is a volume boundary. * * @type {number} * @default 0 */ this.thickness = 0; /** * A texture that defines the thickness, stored in the green channel. This will * be multiplied by `thickness`. * * @type {?Texture} * @default null */ this.thicknessMap = null; /** * Density of the medium given as the average distance that light travels in * the medium before interacting with a particle. The value is given in world * space units, and must be greater than zero. * * @type {number} * @default Infinity */ this.attenuationDistance = Infinity; /** * The color that white light turns into due to absorption when reaching the * attenuation distance. * * @type {Color} * @default (1,1,1) */ this.attenuationColor = new Color( 1, 1, 1 ); /** * A float that scales the amount of specular reflection for non-metals only. * When set to zero, the model is effectively Lambertian. From `0.0` to `1.0`. * * @type {number} * @default 1 */ this.specularIntensity = 1.0; /** * The alpha channel of this texture is multiplied against `specularIntensity`, * for per-pixel control over specular intensity. * * @type {?Texture} * @default null */ this.specularIntensityMap = null; /** * Tints the specular reflection at normal incidence for non-metals only. * * @type {Color} * @default (1,1,1) */ this.specularColor = new Color( 1, 1, 1 ); /** * The RGB channels of this texture are multiplied against `specularColor`, * for per-pixel control over specular color. 
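 *
 * For example, a sketch of dimming and tinting the specular reflection of a non-metal
 * (values are illustrative):
 *
 * ```js
 * material.specularIntensity = 0.4;
 * material.specularColor.set( 0xffddaa );
 * ```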
* * @type {?Texture} * @default null */ this.specularColorMap = null; this._anisotropy = 0; this._clearcoat = 0; this._dispersion = 0; this._iridescence = 0; this._sheen = 0.0; this._transmission = 0; this.setValues( parameters ); } /** * The anisotropy strength. * * @type {number} * @default 0 */ get anisotropy() { return this._anisotropy; } set anisotropy( value ) { if ( this._anisotropy > 0 !== value > 0 ) { this.version ++; } this._anisotropy = value; } /** * Represents the intensity of the clear coat layer, from `0.0` to `1.0`. Use * clear coat related properties to enable multilayer materials that have a * thin translucent layer over the base layer. * * @type {number} * @default 0 */ get clearcoat() { return this._clearcoat; } set clearcoat( value ) { if ( this._clearcoat > 0 !== value > 0 ) { this.version ++; } this._clearcoat = value; } /** * The intensity of the iridescence layer, simulating RGB color shift based on the angle between * the surface and the viewer, from `0.0` to `1.0`. * * @type {number} * @default 0 */ get iridescence() { return this._iridescence; } set iridescence( value ) { if ( this._iridescence > 0 !== value > 0 ) { this.version ++; } this._iridescence = value; } /** * Defines the strength of the angular separation of colors (chromatic aberration) transmitting * through a relatively clear volume. Any value zero or larger is valid, the typical range of * realistic values is `[0, 1]`. This property can be only be used with transmissive objects. * * @type {number} * @default 0 */ get dispersion() { return this._dispersion; } set dispersion( value ) { if ( this._dispersion > 0 !== value > 0 ) { this.version ++; } this._dispersion = value; } /** * The intensity of the sheen layer, from `0.0` to `1.0`. * * @type {number} * @default 0 */ get sheen() { return this._sheen; } set sheen( value ) { if ( this._sheen > 0 !== value > 0 ) { this.version ++; } this._sheen = value; } /** * Degree of transmission (or optical transparency), from `0.0` to `1.0`. * * Thin, transparent or semitransparent, plastic or glass materials remain * largely reflective even if they are fully transmissive. The transmission * property can be used to model these materials. * * When transmission is non-zero, `opacity` should be set to `1`. 
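 *
 * A rough sketch of a glass-like setup (values are illustrative):
 *
 * ```js
 * const glassMaterial = new THREE.MeshPhysicalMaterial( {
 * 	transmission: 1.0,
 * 	opacity: 1.0, // keep opacity at 1 while transmission is non-zero
 * 	roughness: 0.0,
 * 	ior: 1.5,
 * 	thickness: 0.5 // treat the mesh as a volume rather than thin-walled
 * } );
 * ```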
* * @type {number} * @default 0 */ get transmission() { return this._transmission; } set transmission( value ) { if ( this._transmission > 0 !== value > 0 ) { this.version ++; } this._transmission = value; } copy( source ) { super.copy( source ); this.defines = { 'STANDARD': '', 'PHYSICAL': '' }; this.anisotropy = source.anisotropy; this.anisotropyRotation = source.anisotropyRotation; this.anisotropyMap = source.anisotropyMap; this.clearcoat = source.clearcoat; this.clearcoatMap = source.clearcoatMap; this.clearcoatRoughness = source.clearcoatRoughness; this.clearcoatRoughnessMap = source.clearcoatRoughnessMap; this.clearcoatNormalMap = source.clearcoatNormalMap; this.clearcoatNormalScale.copy( source.clearcoatNormalScale ); this.dispersion = source.dispersion; this.ior = source.ior; this.iridescence = source.iridescence; this.iridescenceMap = source.iridescenceMap; this.iridescenceIOR = source.iridescenceIOR; this.iridescenceThicknessRange = [ ...source.iridescenceThicknessRange ]; this.iridescenceThicknessMap = source.iridescenceThicknessMap; this.sheen = source.sheen; this.sheenColor.copy( source.sheenColor ); this.sheenColorMap = source.sheenColorMap; this.sheenRoughness = source.sheenRoughness; this.sheenRoughnessMap = source.sheenRoughnessMap; this.transmission = source.transmission; this.transmissionMap = source.transmissionMap; this.thickness = source.thickness; this.thicknessMap = source.thicknessMap; this.attenuationDistance = source.attenuationDistance; this.attenuationColor.copy( source.attenuationColor ); this.specularIntensity = source.specularIntensity; this.specularIntensityMap = source.specularIntensityMap; this.specularColor.copy( source.specularColor ); this.specularColorMap = source.specularColorMap; return this; } } /** * A material for shiny surfaces with specular highlights. * * The material uses a non-physically based [Blinn-Phong]{@link https://en.wikipedia.org/wiki/Blinn-Phong_shading_model} * model for calculating reflectance. Unlike the Lambertian model used in the * {@link MeshLambertMaterial} this can simulate shiny surfaces with specular * highlights (such as varnished wood). `MeshPhongMaterial` uses per-fragment shading. * * Performance will generally be greater when using this material over the * {@link MeshStandardMaterial} or {@link MeshPhysicalMaterial}, at the cost of * some graphical accuracy. * * @augments Material */ class MeshPhongMaterial extends Material { /** * Constructs a new mesh phong material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshPhongMaterial = true; this.type = 'MeshPhongMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); // diffuse /** * Specular color of the material. The default color is set to `0x111111` (very dark grey) * * This defines how shiny the material is and the color of its shine. * * @type {Color} */ this.specular = new Color( 0x111111 ); /** * How shiny the specular highlight is; a higher value gives a sharper highlight. * * @type {number} * @default 30 */ this.shininess = 30; /** * The color map. 
May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The light map. Requires a second set of UVs. * * @type {?Texture} * @default null */ this.lightMap = null; /** * Intensity of the baked light. * * @type {number} * @default 1 */ this.lightMapIntensity = 1.0; /** * The red channel of this texture is used as the ambient occlusion map. * Requires a second set of UVs. * * @type {?Texture} * @default null */ this.aoMap = null; /** * Intensity of the ambient occlusion effect. Range is `[0,1]`, where `0` * disables ambient occlusion. Where intensity is `1` and the AO map's * red channel is also `1`, ambient light is fully occluded on a surface. * * @type {number} * @default 1 */ this.aoMapIntensity = 1.0; /** * Emissive (light) color of the material, essentially a solid color * unaffected by other lighting. * * @type {Color} * @default (0,0,0) */ this.emissive = new Color( 0x000000 ); /** * Intensity of the emissive light. Modulates the emissive color. * * @type {number} * @default 1 */ this.emissiveIntensity = 1.0; /** * Set emissive (glow) map. The emissive map color is modulated by the * emissive color and the emissive intensity. If you have an emissive map, * be sure to set the emissive color to something other than black. * * @type {?Texture} * @default null */ this.emissiveMap = null; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. * * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. 
* The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * The specular map value affects both how much the specular surface * highlight contributes and how much of the environment map affects the * surface. * * @type {?Texture} * @default null */ this.specularMap = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The environment map. * * @type {?Texture} * @default null */ this.envMap = null; /** * The rotation of the environment map in radians. * * @type {Euler} * @default (0,0,0) */ this.envMapRotation = new Euler(); /** * How to combine the result of the surface's color with the environment map, if any. * * When set to `MixOperation`, the {@link MeshBasicMaterial#reflectivity} is used to * blend between the two colors. * * @type {(MultiplyOperation|MixOperation|AddOperation)} * @default MultiplyOperation */ this.combine = MultiplyOperation; /** * How much the environment map affects the surface. * The valid range is between `0` (no reflections) and `1` (full reflections). * * @type {number} * @default 1 */ this.reflectivity = 1; /** * The index of refraction (IOR) of air (approximately 1) divided by the * index of refraction of the material. It is used with environment mapping * modes {@link CubeRefractionMapping} and {@link EquirectangularRefractionMapping}. * The refraction ratio should not exceed `1`. * * @type {number} * @default 0.98 */ this.refractionRatio = 0.98; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * Can only be used with {@link SVGRenderer}. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines appearance of wireframe ends. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinecap = 'round'; /** * Defines appearance of wireframe joints. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinejoin = 'round'; /** * Whether the material is rendered with flat shading or not. * * @type {boolean} * @default false */ this.flatShading = false; /** * Whether the material is affected by fog or not. 
* * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.specular.copy( source.specular ); this.shininess = source.shininess; this.map = source.map; this.lightMap = source.lightMap; this.lightMapIntensity = source.lightMapIntensity; this.aoMap = source.aoMap; this.aoMapIntensity = source.aoMapIntensity; this.emissive.copy( source.emissive ); this.emissiveMap = source.emissiveMap; this.emissiveIntensity = source.emissiveIntensity; this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.specularMap = source.specularMap; this.alphaMap = source.alphaMap; this.envMap = source.envMap; this.envMapRotation.copy( source.envMapRotation ); this.combine = source.combine; this.reflectivity = source.reflectivity; this.refractionRatio = source.refractionRatio; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.wireframeLinecap = source.wireframeLinecap; this.wireframeLinejoin = source.wireframeLinejoin; this.flatShading = source.flatShading; this.fog = source.fog; return this; } } /** * A material implementing toon shading. * * @augments Material */ class MeshToonMaterial extends Material { /** * Constructs a new mesh toon material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshToonMaterial = true; this.defines = { 'TOON': '' }; this.type = 'MeshToonMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * Gradient map for toon shading. It's required to set * {@link Texture#minFilter} and {@link Texture#magFilter} to {@linkNearestFilter} * when using this type of texture. * * @type {?Texture} * @default null */ this.gradientMap = null; /** * The light map. Requires a second set of UVs. * * @type {?Texture} * @default null */ this.lightMap = null; /** * Intensity of the baked light. * * @type {number} * @default 1 */ this.lightMapIntensity = 1.0; /** * The red channel of this texture is used as the ambient occlusion map. * Requires a second set of UVs. * * @type {?Texture} * @default null */ this.aoMap = null; /** * Intensity of the ambient occlusion effect. Range is `[0,1]`, where `0` * disables ambient occlusion. Where intensity is `1` and the AO map's * red channel is also `1`, ambient light is fully occluded on a surface. * * @type {number} * @default 1 */ this.aoMapIntensity = 1.0; /** * Emissive (light) color of the material, essentially a solid color * unaffected by other lighting. 
* * @type {Color} * @default (0,0,0) */ this.emissive = new Color( 0x000000 ); /** * Intensity of the emissive light. Modulates the emissive color. * * @type {number} * @default 1 */ this.emissiveIntensity = 1.0; /** * Set emissive (glow) map. The emissive map color is modulated by the * emissive color and the emissive intensity. If you have an emissive map, * be sure to set the emissive color to something other than black. * * @type {?Texture} * @default null */ this.emissiveMap = null; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. * * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * Can only be used with {@link SVGRenderer}. 
* * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines appearance of wireframe ends. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinecap = 'round'; /** * Defines appearance of wireframe joints. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinejoin = 'round'; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.gradientMap = source.gradientMap; this.lightMap = source.lightMap; this.lightMapIntensity = source.lightMapIntensity; this.aoMap = source.aoMap; this.aoMapIntensity = source.aoMapIntensity; this.emissive.copy( source.emissive ); this.emissiveMap = source.emissiveMap; this.emissiveIntensity = source.emissiveIntensity; this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.alphaMap = source.alphaMap; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.wireframeLinecap = source.wireframeLinecap; this.wireframeLinejoin = source.wireframeLinejoin; this.fog = source.fog; return this; } } /** * A material that maps the normal vectors to RGB colors. * * @augments Material */ class MeshNormalMaterial extends Material { /** * Constructs a new mesh normal material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshNormalMaterial = true; this.type = 'MeshNormalMaterial'; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. 
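 *
 * For example, a sketch of flipping the green channel for a normal map authored with the
 * left-handed convention, as described for `normalMap` above:
 *
 * ```js
 * material.normalScale.set( 1, - 1 );
 * ```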
* * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * WebGL and WebGPU ignore this property and always render * 1 pixel wide lines. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Whether the material is rendered with flat shading or not. * * @type {boolean} * @default false */ this.flatShading = false; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.flatShading = source.flatShading; return this; } } /** * A material for non-shiny surfaces, without specular highlights. * * The material uses a non-physically based [Lambertian]{@link https://en.wikipedia.org/wiki/Lambertian_reflectance} * model for calculating reflectance. This can simulate some surfaces (such * as untreated wood or stone) well, but cannot simulate shiny surfaces with * specular highlights (such as varnished wood). `MeshLambertMaterial` uses per-fragment * shading. * * Due to the simplicity of the reflectance and illumination models, * performance will be greater when using this material over the * {@link MeshPhongMaterial}, {@link MeshStandardMaterial} or * {@link MeshPhysicalMaterial}, at the cost of some graphical accuracy. * * @augments Material */ class MeshLambertMaterial extends Material { /** * Constructs a new mesh lambert material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshLambertMaterial = true; this.type = 'MeshLambertMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); // diffuse /** * The color map. 
May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The light map. Requires a second set of UVs. * * @type {?Texture} * @default null */ this.lightMap = null; /** * Intensity of the baked light. * * @type {number} * @default 1 */ this.lightMapIntensity = 1.0; /** * The red channel of this texture is used as the ambient occlusion map. * Requires a second set of UVs. * * @type {?Texture} * @default null */ this.aoMap = null; /** * Intensity of the ambient occlusion effect. Range is `[0,1]`, where `0` * disables ambient occlusion. Where intensity is `1` and the AO map's * red channel is also `1`, ambient light is fully occluded on a surface. * * @type {number} * @default 1 */ this.aoMapIntensity = 1.0; /** * Emissive (light) color of the material, essentially a solid color * unaffected by other lighting. * * @type {Color} * @default (0,0,0) */ this.emissive = new Color( 0x000000 ); /** * Intensity of the emissive light. Modulates the emissive color. * * @type {number} * @default 1 */ this.emissiveIntensity = 1.0; /** * Set emissive (glow) map. The emissive map color is modulated by the * emissive color and the emissive intensity. If you have an emissive map, * be sure to set the emissive color to something other than black. * * @type {?Texture} * @default null */ this.emissiveMap = null; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. * * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. 
* The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * Specular map used by the material. * * @type {?Texture} * @default null */ this.specularMap = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The environment map. * * @type {?Texture} * @default null */ this.envMap = null; /** * The rotation of the environment map in radians. * * @type {Euler} * @default (0,0,0) */ this.envMapRotation = new Euler(); /** * How to combine the result of the surface's color with the environment map, if any. * * When set to `MixOperation`, the {@link MeshBasicMaterial#reflectivity} is used to * blend between the two colors. * * @type {(MultiplyOperation|MixOperation|AddOperation)} * @default MultiplyOperation */ this.combine = MultiplyOperation; /** * How much the environment map affects the surface. * The valid range is between `0` (no reflections) and `1` (full reflections). * * @type {number} * @default 1 */ this.reflectivity = 1; /** * The index of refraction (IOR) of air (approximately 1) divided by the * index of refraction of the material. It is used with environment mapping * modes {@link CubeRefractionMapping} and {@link EquirectangularRefractionMapping}. * The refraction ratio should not exceed `1`. * * @type {number} * @default 0.98 */ this.refractionRatio = 0.98; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * Can only be used with {@link SVGRenderer}. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; /** * Defines appearance of wireframe ends. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinecap = 'round'; /** * Defines appearance of wireframe joints. * * Can only be used with {@link SVGRenderer}. * * @type {('round'|'bevel'|'miter')} * @default 'round' */ this.wireframeLinejoin = 'round'; /** * Whether the material is rendered with flat shading or not. * * @type {boolean} * @default false */ this.flatShading = false; /** * Whether the material is affected by fog or not. 
* * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.color.copy( source.color ); this.map = source.map; this.lightMap = source.lightMap; this.lightMapIntensity = source.lightMapIntensity; this.aoMap = source.aoMap; this.aoMapIntensity = source.aoMapIntensity; this.emissive.copy( source.emissive ); this.emissiveMap = source.emissiveMap; this.emissiveIntensity = source.emissiveIntensity; this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.specularMap = source.specularMap; this.alphaMap = source.alphaMap; this.envMap = source.envMap; this.envMapRotation.copy( source.envMapRotation ); this.combine = source.combine; this.reflectivity = source.reflectivity; this.refractionRatio = source.refractionRatio; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; this.wireframeLinecap = source.wireframeLinecap; this.wireframeLinejoin = source.wireframeLinejoin; this.flatShading = source.flatShading; this.fog = source.fog; return this; } } /** * A material for drawing geometry by depth. Depth is based off of the camera * near and far plane. White is nearest, black is farthest. * * @augments Material */ class MeshDepthMaterial extends Material { /** * Constructs a new mesh depth material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshDepthMaterial = true; this.type = 'MeshDepthMaterial'; /** * Type for depth packing. * * @type {(BasicDepthPacking|RGBADepthPacking|RGBDepthPacking|RGDepthPacking)} * @default BasicDepthPacking */ this.depthPacking = BasicDepthPacking; /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. * * @type {?Texture} * @default null */ this.map = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. 
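 *
 * For example (a sketch; `heightTexture` is an assumed, already loaded texture):
 *
 * ```js
 * material.displacementMap = heightTexture; // assumed texture
 * material.displacementScale = 2.5;
 * material.displacementBias = - 0.5;
 * ```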
* * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * Renders the geometry as a wireframe. * * @type {boolean} * @default false */ this.wireframe = false; /** * Controls the thickness of the wireframe. * * WebGL and WebGPU ignore this property and always render * 1 pixel wide lines. * * @type {number} * @default 1 */ this.wireframeLinewidth = 1; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.depthPacking = source.depthPacking; this.map = source.map; this.alphaMap = source.alphaMap; this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.wireframe = source.wireframe; this.wireframeLinewidth = source.wireframeLinewidth; return this; } } /** * A material used internally for implementing shadow mapping with * point lights. * * Can also be used to customize the shadow casting of an object by assigning * an instance of `MeshDistanceMaterial` to {@link Object3D#customDistanceMaterial}. * The following examples demonstrates this approach in order to ensure * transparent parts of objects do no cast shadows. * * @augments Material */ class MeshDistanceMaterial extends Material { /** * Constructs a new mesh distance material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshDistanceMaterial = true; this.type = 'MeshDistanceMaterial'; /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. * * @type {?Texture} * @default null */ this.map = null; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. 
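 *
 * A sketch of the `customDistanceMaterial` approach mentioned in the class description above,
 * assuming `mesh` casts shadows and uses an alpha-tested material:
 *
 * ```js
 * mesh.customDistanceMaterial = new THREE.MeshDistanceMaterial( {
 * 	alphaMap: mesh.material.alphaMap, // assumed: the mesh's own alpha map
 * 	alphaTest: mesh.material.alphaTest
 * } );
 * ```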
* * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.map = source.map; this.alphaMap = source.alphaMap; this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; return this; } } /** * This material is defined by a MatCap (or Lit Sphere) texture, which encodes the * material color and shading. * * `MeshMatcapMaterial` does not respond to lights since the matcap image file encodes * baked lighting. It will cast a shadow onto an object that receives shadows * (and shadow clipping works), but it will not self-shadow or receive * shadows. * * @augments Material */ class MeshMatcapMaterial extends Material { /** * Constructs a new mesh matcap material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isMeshMatcapMaterial = true; this.defines = { 'MATCAP': '' }; this.type = 'MeshMatcapMaterial'; /** * Color of the material. * * @type {Color} * @default (1,1,1) */ this.color = new Color( 0xffffff ); // diffuse /** * The matcap map. * * @type {?Texture} * @default null */ this.matcap = null; /** * The color map. May optionally include an alpha channel, typically combined * with {@link Material#transparent} or {@link Material#alphaTest}. The texture map * color is modulated by the diffuse `color`. * * @type {?Texture} * @default null */ this.map = null; /** * The texture to create a bump map. The black and white values map to the * perceived depth in relation to the lights. Bump doesn't actually affect * the geometry of the object, only the lighting. If a normal map is defined * this will be ignored. * * @type {?Texture} * @default null */ this.bumpMap = null; /** * How much the bump map affects the material. Typical range is `[0,1]`. * * @type {number} * @default 1 */ this.bumpScale = 1; /** * The texture to create a normal map. The RGB values affect the surface * normal for each pixel fragment and change the way the color is lit. Normal * maps do not change the actual shape of the surface, only the lighting. In * case the material has a normal map authored using the left handed * convention, the `y` component of `normalScale` should be negated to compensate * for the different handedness. * * @type {?Texture} * @default null */ this.normalMap = null; /** * The type of normal map. * * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)} * @default TangentSpaceNormalMap */ this.normalMapType = TangentSpaceNormalMap; /** * How much the normal map affects the material. Typical value range is `[0,1]`. 
* * @type {Vector2} * @default (1,1) */ this.normalScale = new Vector2( 1, 1 ); /** * The displacement map affects the position of the mesh's vertices. Unlike * other maps which only affect the light and shade of the material the * displaced vertices can cast shadows, block other objects, and otherwise * act as real geometry. The displacement texture is an image where the value * of each pixel (white being the highest) is mapped against, and * repositions, the vertices of the mesh. * * @type {?Texture} * @default null */ this.displacementMap = null; /** * How much the displacement map affects the mesh (where black is no * displacement, and white is maximum displacement). Without a displacement * map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementScale = 1; /** * The offset of the displacement map's values on the mesh's vertices. * The bias is added to the scaled sample of the displacement map. * Without a displacement map set, this value is not applied. * * @type {number} * @default 0 */ this.displacementBias = 0; /** * The alpha map is a grayscale texture that controls the opacity across the * surface (black: fully transparent; white: fully opaque). * * Only the color of the texture is used, ignoring the alpha channel if one * exists. For RGB and RGBA textures, the renderer will use the green channel * when sampling this texture due to the extra bit of precision provided for * green in DXT-compressed and uncompressed RGB 565 formats. Luminance-only and * luminance/alpha textures will also still work as expected. * * @type {?Texture} * @default null */ this.alphaMap = null; /** * Whether the material is rendered with flat shading or not. * * @type {boolean} * @default false */ this.flatShading = false; /** * Whether the material is affected by fog or not. * * @type {boolean} * @default true */ this.fog = true; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.defines = { 'MATCAP': '' }; this.color.copy( source.color ); this.matcap = source.matcap; this.map = source.map; this.bumpMap = source.bumpMap; this.bumpScale = source.bumpScale; this.normalMap = source.normalMap; this.normalMapType = source.normalMapType; this.normalScale.copy( source.normalScale ); this.displacementMap = source.displacementMap; this.displacementScale = source.displacementScale; this.displacementBias = source.displacementBias; this.alphaMap = source.alphaMap; this.flatShading = source.flatShading; this.fog = source.fog; return this; } } /** * A material for rendering line primitives. * * Materials define the appearance of renderable 3D objects. * * ```js * const material = new THREE.LineDashedMaterial( { * color: 0xffffff, * scale: 1, * dashSize: 3, * gapSize: 1, * } ); * ``` * * @augments LineBasicMaterial */ class LineDashedMaterial extends LineBasicMaterial { /** * Constructs a new line dashed material. * * @param {Object} [parameters] - An object with one or more properties * defining the material's appearance. Any property of the material * (including any property from inherited materials) can be passed * in here. Color values can be passed any type of value accepted * by {@link Color#set}. */ constructor( parameters ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLineDashedMaterial = true; this.type = 'LineDashedMaterial'; /** * The scale of the dashed part of a line. * * @type {number} * @default 1 */ this.scale = 1; /** * The size of the dash. 
This defines the length of the stroked (visible) part of the dash pattern. * * @type {number} * @default 3 */ this.dashSize = 3; /** * The size of the gap. * * @type {number} * @default 1 */ this.gapSize = 1; this.setValues( parameters ); } copy( source ) { super.copy( source ); this.scale = source.scale; this.dashSize = source.dashSize; this.gapSize = source.gapSize; return this; } } /** * Converts an array to a specific type. * * @param {TypedArray|Array} array - The array to convert. * @param {TypedArray.constructor} type - The constructor of a typed array that defines the new type. * @return {TypedArray} The converted array. */ function convertArray( array, type ) { if ( ! array || array.constructor === type ) return array; if ( typeof type.BYTES_PER_ELEMENT === 'number' ) { return new type( array ); // create typed array } return Array.prototype.slice.call( array ); // create Array } /** * Returns `true` if the given object is a typed array. * * @param {any} object - The object to check. * @return {boolean} Whether the given object is a typed array. */ function isTypedArray( object ) { return ArrayBuffer.isView( object ) && ! ( object instanceof DataView ); } /** * Returns an array by which times and values can be sorted. * * @param {Array} times - The keyframe time values. * @return {Array} An array of indices defining the sort order. */ function getKeyframeOrder( times ) { function compareTime( i, j ) { return times[ i ] - times[ j ]; } const n = times.length; const result = new Array( n ); for ( let i = 0; i !== n; ++ i ) result[ i ] = i; result.sort( compareTime ); return result; } /** * Sorts the given array by the previously computed order via `getKeyframeOrder()`. * * @param {Array} values - The values to sort. * @param {number} stride - The stride. * @param {Array} order - The sort order. * @return {Array} The sorted values. */ function sortedArray( values, stride, order ) { const nValues = values.length; const result = new values.constructor( nValues ); for ( let i = 0, dstOffset = 0; dstOffset !== nValues; ++ i ) { const srcOffset = order[ i ] * stride; for ( let j = 0; j !== stride; ++ j ) { result[ dstOffset ++ ] = values[ srcOffset + j ]; } } return result; } /** * Used for parsing AOS (array-of-structures) keyframe formats, i.e. arrays of * keyframe objects that each carry a time and the named value property. * * @param {Array} jsonKeys - A list of JSON keyframes. * @param {Array} times - This array will be filled with keyframe times by this function. * @param {Array} values - This array will be filled with keyframe values by this function. * @param {string} valuePropertyName - The name of the property to use.
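 *
 * A short sketch of the expected input and output, using a hypothetical
 * array of keys that store their value under a `pos` property:
 * ```js
 * const jsonKeys = [
 * 	{ time: 0, pos: [ 0, 0, 0 ] },
 * 	{ time: 1, pos: [ 2, 0, 0 ] }
 * ];
 *
 * const times = [], values = [];
 * THREE.AnimationUtils.flattenJSON( jsonKeys, times, values, 'pos' );
 * // times  -> [ 0, 1 ]
 * // values -> [ 0, 0, 0, 2, 0, 0 ]
 * ```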
 */ function flattenJSON( jsonKeys, times, values, valuePropertyName ) { let i = 1, key = jsonKeys[ 0 ]; while ( key !== undefined && key[ valuePropertyName ] === undefined ) { key = jsonKeys[ i ++ ]; } if ( key === undefined ) return; // no data let value = key[ valuePropertyName ]; if ( value === undefined ) return; // no data if ( Array.isArray( value ) ) { do { value = key[ valuePropertyName ]; if ( value !== undefined ) { times.push( key.time ); values.push( ...value ); // push all elements } key = jsonKeys[ i ++ ]; } while ( key !== undefined ); } else if ( value.toArray !== undefined ) { // ...assume THREE.Math-ish do { value = key[ valuePropertyName ]; if ( value !== undefined ) { times.push( key.time ); value.toArray( values, values.length ); } key = jsonKeys[ i ++ ]; } while ( key !== undefined ); } else { // otherwise push as-is do { value = key[ valuePropertyName ]; if ( value !== undefined ) { times.push( key.time ); values.push( value ); } key = jsonKeys[ i ++ ]; } while ( key !== undefined ); } } /** * Creates a new clip, containing only the segment of the original clip between the given frames. * * @param {AnimationClip} sourceClip - The clip from which the segment is extracted. * @param {string} name - The name of the clip. * @param {number} startFrame - The start frame. * @param {number} endFrame - The end frame. * @param {number} [fps=30] - The FPS. * @return {AnimationClip} The new sub clip. */ function subclip( sourceClip, name, startFrame, endFrame, fps = 30 ) { const clip = sourceClip.clone(); clip.name = name; const tracks = []; for ( let i = 0; i < clip.tracks.length; ++ i ) { const track = clip.tracks[ i ]; const valueSize = track.getValueSize(); const times = []; const values = []; for ( let j = 0; j < track.times.length; ++ j ) { const frame = track.times[ j ] * fps; if ( frame < startFrame || frame >= endFrame ) continue; times.push( track.times[ j ] ); for ( let k = 0; k < valueSize; ++ k ) { values.push( track.values[ j * valueSize + k ] ); } } if ( times.length === 0 ) continue; track.times = convertArray( times, track.times.constructor ); track.values = convertArray( values, track.values.constructor ); tracks.push( track ); } clip.tracks = tracks; // find minimum .times value across all tracks in the trimmed clip let minStartTime = Infinity; for ( let i = 0; i < clip.tracks.length; ++ i ) { if ( minStartTime > clip.tracks[ i ].times[ 0 ] ) { minStartTime = clip.tracks[ i ].times[ 0 ]; } } // shift all tracks such that clip begins at t=0 for ( let i = 0; i < clip.tracks.length; ++ i ) { clip.tracks[ i ].shift( -1 * minStartTime ); } clip.resetDuration(); return clip; } /** * Converts the keyframes of the given animation clip to an additive format. * * @param {AnimationClip} targetClip - The clip to make additive. * @param {number} [referenceFrame=0] - The reference frame. * @param {AnimationClip} [referenceClip=targetClip] - The reference clip. * @param {number} [fps=30] - The FPS. * @return {AnimationClip} The updated clip which is now additive.
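 *
 * A minimal usage sketch (the `walkClip`, `idleClip` and `mixer` variables are
 * assumed to exist already and are only illustrative):
 * ```js
 * // Turn 'walk' into an additive clip, relative to frame 0 of 'idle',
 * // so it can be layered on top of other actions.
 * THREE.AnimationUtils.makeClipAdditive( walkClip, 0, idleClip );
 *
 * const action = mixer.clipAction( walkClip );
 * action.play();
 * ```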
*/ function makeClipAdditive( targetClip, referenceFrame = 0, referenceClip = targetClip, fps = 30 ) { if ( fps <= 0 ) fps = 30; const numTracks = referenceClip.tracks.length; const referenceTime = referenceFrame / fps; // Make each track's values relative to the values at the reference frame for ( let i = 0; i < numTracks; ++ i ) { const referenceTrack = referenceClip.tracks[ i ]; const referenceTrackType = referenceTrack.ValueTypeName; // Skip this track if it's non-numeric if ( referenceTrackType === 'bool' || referenceTrackType === 'string' ) continue; // Find the track in the target clip whose name and type matches the reference track const targetTrack = targetClip.tracks.find( function ( track ) { return track.name === referenceTrack.name && track.ValueTypeName === referenceTrackType; } ); if ( targetTrack === undefined ) continue; let referenceOffset = 0; const referenceValueSize = referenceTrack.getValueSize(); if ( referenceTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) { referenceOffset = referenceValueSize / 3; } let targetOffset = 0; const targetValueSize = targetTrack.getValueSize(); if ( targetTrack.createInterpolant.isInterpolantFactoryMethodGLTFCubicSpline ) { targetOffset = targetValueSize / 3; } const lastIndex = referenceTrack.times.length - 1; let referenceValue; // Find the value to subtract out of the track if ( referenceTime <= referenceTrack.times[ 0 ] ) { // Reference frame is earlier than the first keyframe, so just use the first keyframe const startIndex = referenceOffset; const endIndex = referenceValueSize - referenceOffset; referenceValue = referenceTrack.values.slice( startIndex, endIndex ); } else if ( referenceTime >= referenceTrack.times[ lastIndex ] ) { // Reference frame is after the last keyframe, so just use the last keyframe const startIndex = lastIndex * referenceValueSize + referenceOffset; const endIndex = startIndex + referenceValueSize - referenceOffset; referenceValue = referenceTrack.values.slice( startIndex, endIndex ); } else { // Interpolate to the reference value const interpolant = referenceTrack.createInterpolant(); const startIndex = referenceOffset; const endIndex = referenceValueSize - referenceOffset; interpolant.evaluate( referenceTime ); referenceValue = interpolant.resultBuffer.slice( startIndex, endIndex ); } // Conjugate the quaternion if ( referenceTrackType === 'quaternion' ) { const referenceQuat = new Quaternion().fromArray( referenceValue ).normalize().conjugate(); referenceQuat.toArray( referenceValue ); } // Subtract the reference value from all of the track values const numTimes = targetTrack.times.length; for ( let j = 0; j < numTimes; ++ j ) { const valueStart = j * targetValueSize + targetOffset; if ( referenceTrackType === 'quaternion' ) { // Multiply the conjugate for quaternion track types Quaternion.multiplyQuaternionsFlat( targetTrack.values, valueStart, referenceValue, 0, targetTrack.values, valueStart ); } else { const valueEnd = targetValueSize - targetOffset * 2; // Subtract each value for all other numeric track types for ( let k = 0; k < valueEnd; ++ k ) { targetTrack.values[ valueStart + k ] -= referenceValue[ k ]; } } } } targetClip.blendMode = AdditiveAnimationBlendMode; return targetClip; } /** * A class with various methods to assist with animations. * * @hideconstructor */ class AnimationUtils { /** * Converts an array to a specific type * * @static * @param {TypedArray|Array} array - The array to convert. * @param {TypedArray.constructor} type - The constructor of a type array. 
 * @return {TypedArray} The converted array. */ static convertArray( array, type ) { return convertArray( array, type ); } /** * Returns `true` if the given object is a typed array. * * @static * @param {any} object - The object to check. * @return {boolean} Whether the given object is a typed array. */ static isTypedArray( object ) { return isTypedArray( object ); } /** * Returns an array by which times and values can be sorted. * * @static * @param {Array} times - The keyframe time values. * @return {Array} An array of indices defining the sort order. */ static getKeyframeOrder( times ) { return getKeyframeOrder( times ); } /** * Sorts the given array by the previously computed order via `getKeyframeOrder()`. * * @static * @param {Array} values - The values to sort. * @param {number} stride - The stride. * @param {Array} order - The sort order. * @return {Array} The sorted values. */ static sortedArray( values, stride, order ) { return sortedArray( values, stride, order ); } /** * Used for parsing AOS (array-of-structures) keyframe formats. * * @static * @param {Array} jsonKeys - A list of JSON keyframes. * @param {Array} times - This array will be filled with keyframe times by this method. * @param {Array} values - This array will be filled with keyframe values by this method. * @param {string} valuePropertyName - The name of the property to use. */ static flattenJSON( jsonKeys, times, values, valuePropertyName ) { flattenJSON( jsonKeys, times, values, valuePropertyName ); } /** * Creates a new clip, containing only the segment of the original clip between the given frames. * * @static * @param {AnimationClip} sourceClip - The clip from which the segment is extracted. * @param {string} name - The name of the clip. * @param {number} startFrame - The start frame. * @param {number} endFrame - The end frame. * @param {number} [fps=30] - The FPS. * @return {AnimationClip} The new sub clip. */ static subclip( sourceClip, name, startFrame, endFrame, fps = 30 ) { return subclip( sourceClip, name, startFrame, endFrame, fps ); } /** * Converts the keyframes of the given animation clip to an additive format. * * @static * @param {AnimationClip} targetClip - The clip to make additive. * @param {number} [referenceFrame=0] - The reference frame. * @param {AnimationClip} [referenceClip=targetClip] - The reference clip. * @param {number} [fps=30] - The FPS. * @return {AnimationClip} The updated clip which is now additive. */ static makeClipAdditive( targetClip, referenceFrame = 0, referenceClip = targetClip, fps = 30 ) { return makeClipAdditive( targetClip, referenceFrame, referenceClip, fps ); } } /** * Abstract base class of interpolants over parametric samples. * * The parameter domain is one dimensional, typically the time or a path * along a curve defined by the data. * * The sample values can have any dimensionality and derived classes may * apply special interpretations to the data. * * This class provides the interval seek in a Template Method, deferring * the actual interpolation to derived classes. * * Time complexity is O(1) for linear access crossing at most two points * and O(log N) for random access, where N is the number of positions. * * References: {@link http://www.oodesign.com/template-method-pattern.html} * * @abstract */ class Interpolant { /** * Constructs a new interpolant. * * @param {TypedArray} parameterPositions - The parameter positions hold the interpolation factors. * @param {TypedArray} sampleValues - The sample values. * @param {number} sampleSize - The sample size. * @param {TypedArray} [resultBuffer] - The result buffer.
*/ constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) { /** * The parameter positions. * * @type {TypedArray} */ this.parameterPositions = parameterPositions; /** * A cache index. * * @private * @type {number} * @default 0 */ this._cachedIndex = 0; /** * The result buffer. * * @type {TypedArray} */ this.resultBuffer = resultBuffer !== undefined ? resultBuffer : new sampleValues.constructor( sampleSize ); /** * The sample values. * * @type {TypedArray} */ this.sampleValues = sampleValues; /** * The value size. * * @type {TypedArray} */ this.valueSize = sampleSize; /** * The interpolation settings. * * @type {?Object} * @default null */ this.settings = null; /** * The default settings object. * * @type {Object} */ this.DefaultSettings_ = {}; } /** * Evaluate the interpolant at position `t`. * * @param {number} t - The interpolation factor. * @return {TypedArray} The result buffer. */ evaluate( t ) { const pp = this.parameterPositions; let i1 = this._cachedIndex, t1 = pp[ i1 ], t0 = pp[ i1 - 1 ]; validate_interval: { seek: { let right; linear_scan: { //- See http://jsperf.com/comparison-to-undefined/3 //- slower code: //- //- if ( t >= t1 || t1 === undefined ) { forward_scan: if ( ! ( t < t1 ) ) { for ( let giveUpAt = i1 + 2; ; ) { if ( t1 === undefined ) { if ( t < t0 ) break forward_scan; // after end i1 = pp.length; this._cachedIndex = i1; return this.copySampleValue_( i1 - 1 ); } if ( i1 === giveUpAt ) break; // this loop t0 = t1; t1 = pp[ ++ i1 ]; if ( t < t1 ) { // we have arrived at the sought interval break seek; } } // prepare binary search on the right side of the index right = pp.length; break linear_scan; } //- slower code: //- if ( t < t0 || t0 === undefined ) { if ( ! ( t >= t0 ) ) { // looping? const t1global = pp[ 1 ]; if ( t < t1global ) { i1 = 2; // + 1, using the scan for the details t0 = t1global; } // linear reverse scan for ( let giveUpAt = i1 - 2; ; ) { if ( t0 === undefined ) { // before start this._cachedIndex = 0; return this.copySampleValue_( 0 ); } if ( i1 === giveUpAt ) break; // this loop t1 = t0; t0 = pp[ -- i1 - 1 ]; if ( t >= t0 ) { // we have arrived at the sought interval break seek; } } // prepare binary search on the left side of the index right = i1; i1 = 0; break linear_scan; } // the interval is valid break validate_interval; } // linear scan // binary search while ( i1 < right ) { const mid = ( i1 + right ) >>> 1; if ( t < pp[ mid ] ) { right = mid; } else { i1 = mid + 1; } } t1 = pp[ i1 ]; t0 = pp[ i1 - 1 ]; // check boundary cases, again if ( t0 === undefined ) { this._cachedIndex = 0; return this.copySampleValue_( 0 ); } if ( t1 === undefined ) { i1 = pp.length; this._cachedIndex = i1; return this.copySampleValue_( i1 - 1 ); } } // seek this._cachedIndex = i1; this.intervalChanged_( i1, t0, t1 ); } // validate_interval return this.interpolate_( i1, t0, t, t1 ); } /** * Returns the interpolation settings. * * @return {Object} The interpolation settings. */ getSettings_() { return this.settings || this.DefaultSettings_; } /** * Copies a sample value to the result buffer. * * @param {number} index - An index into the sample value buffer. * @return {TypedArray} The result buffer. */ copySampleValue_( index ) { // copies a sample value to the result buffer const result = this.resultBuffer, values = this.sampleValues, stride = this.valueSize, offset = index * stride; for ( let i = 0; i !== stride; ++ i ) { result[ i ] = values[ offset + i ]; } return result; } /** * Copies a sample value to the result buffer. 
* * @abstract * @param {number} i1 - An index into the sample value buffer. * @param {number} t0 - The previous interpolation factor. * @param {number} t - The current interpolation factor. * @param {number} t1 - The next interpolation factor. * @return {TypedArray} The result buffer. */ interpolate_( /* i1, t0, t, t1 */ ) { throw new Error( 'call to abstract method' ); // implementations shall return this.resultBuffer } /** * Optional method that is executed when the interval has changed. * * @param {number} i1 - An index into the sample value buffer. * @param {number} t0 - The previous interpolation factor. * @param {number} t - The current interpolation factor. */ intervalChanged_( /* i1, t0, t1 */ ) { // empty } } /** * Fast and simple cubic spline interpolant. * * It was derived from a Hermitian construction setting the first derivative * at each sample position to the linear slope between neighboring positions * over their parameter interval. * * @augments Interpolant */ class CubicInterpolant extends Interpolant { /** * Constructs a new cubic interpolant. * * @param {TypedArray} parameterPositions - The parameter positions hold the interpolation factors. * @param {TypedArray} sampleValues - The sample values. * @param {number} sampleSize - The sample size * @param {TypedArray} [resultBuffer] - The result buffer. */ constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) { super( parameterPositions, sampleValues, sampleSize, resultBuffer ); this._weightPrev = -0; this._offsetPrev = -0; this._weightNext = -0; this._offsetNext = -0; this.DefaultSettings_ = { endingStart: ZeroCurvatureEnding, endingEnd: ZeroCurvatureEnding }; } intervalChanged_( i1, t0, t1 ) { const pp = this.parameterPositions; let iPrev = i1 - 2, iNext = i1 + 1, tPrev = pp[ iPrev ], tNext = pp[ iNext ]; if ( tPrev === undefined ) { switch ( this.getSettings_().endingStart ) { case ZeroSlopeEnding: // f'(t0) = 0 iPrev = i1; tPrev = 2 * t0 - t1; break; case WrapAroundEnding: // use the other end of the curve iPrev = pp.length - 2; tPrev = t0 + pp[ iPrev ] - pp[ iPrev + 1 ]; break; default: // ZeroCurvatureEnding // f''(t0) = 0 a.k.a. Natural Spline iPrev = i1; tPrev = t1; } } if ( tNext === undefined ) { switch ( this.getSettings_().endingEnd ) { case ZeroSlopeEnding: // f'(tN) = 0 iNext = i1; tNext = 2 * t1 - t0; break; case WrapAroundEnding: // use the other end of the curve iNext = 1; tNext = t1 + pp[ 1 ] - pp[ 0 ]; break; default: // ZeroCurvatureEnding // f''(tN) = 0, a.k.a. Natural Spline iNext = i1 - 1; tNext = t0; } } const halfDt = ( t1 - t0 ) * 0.5, stride = this.valueSize; this._weightPrev = halfDt / ( t0 - tPrev ); this._weightNext = halfDt / ( tNext - t1 ); this._offsetPrev = iPrev * stride; this._offsetNext = iNext * stride; } interpolate_( i1, t0, t, t1 ) { const result = this.resultBuffer, values = this.sampleValues, stride = this.valueSize, o1 = i1 * stride, o0 = o1 - stride, oP = this._offsetPrev, oN = this._offsetNext, wP = this._weightPrev, wN = this._weightNext, p = ( t - t0 ) / ( t1 - t0 ), pp = p * p, ppp = pp * p; // evaluate polynomials const sP = - wP * ppp + 2 * wP * pp - wP * p; const s0 = ( 1 + wP ) * ppp + ( -1.5 - 2 * wP ) * pp + ( -0.5 + wP ) * p + 1; const s1 = ( -1 - wN ) * ppp + ( 1.5 + wN ) * pp + 0.5 * p; const sN = wN * ppp - wN * pp; // combine data linearly for ( let i = 0; i !== stride; ++ i ) { result[ i ] = sP * values[ oP + i ] + s0 * values[ o0 + i ] + s1 * values[ o1 + i ] + sN * values[ oN + i ]; } return result; } } /** * A basic linear interpolant. 
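 *
 * A minimal evaluation sketch (two scalar keyframes at t = 0 and t = 1):
 * ```js
 * const interpolant = new THREE.LinearInterpolant(
 * 	new Float32Array( [ 0, 1 ] ),  // parameter positions (times)
 * 	new Float32Array( [ 0, 10 ] ), // sample values
 * 	1                              // value size
 * );
 *
 * interpolant.evaluate( 0.5 ); // the result buffer now holds [ 5 ]
 * ```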
 * * @augments Interpolant */ class LinearInterpolant extends Interpolant { /** * Constructs a new linear interpolant. * * @param {TypedArray} parameterPositions - The parameter positions hold the interpolation factors. * @param {TypedArray} sampleValues - The sample values. * @param {number} sampleSize - The sample size. * @param {TypedArray} [resultBuffer] - The result buffer. */ constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) { super( parameterPositions, sampleValues, sampleSize, resultBuffer ); } interpolate_( i1, t0, t, t1 ) { const result = this.resultBuffer, values = this.sampleValues, stride = this.valueSize, offset1 = i1 * stride, offset0 = offset1 - stride, weight1 = ( t - t0 ) / ( t1 - t0 ), weight0 = 1 - weight1; for ( let i = 0; i !== stride; ++ i ) { result[ i ] = values[ offset0 + i ] * weight0 + values[ offset1 + i ] * weight1; } return result; } } /** * Interpolant that evaluates to the sample value at the position preceding * the parameter. * * @augments Interpolant */ class DiscreteInterpolant extends Interpolant { /** * Constructs a new discrete interpolant. * * @param {TypedArray} parameterPositions - The parameter positions hold the interpolation factors. * @param {TypedArray} sampleValues - The sample values. * @param {number} sampleSize - The sample size. * @param {TypedArray} [resultBuffer] - The result buffer. */ constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) { super( parameterPositions, sampleValues, sampleSize, resultBuffer ); } interpolate_( i1 /*, t0, t, t1 */ ) { return this.copySampleValue_( i1 - 1 ); } } /** * Represents a timed sequence of keyframes, which are composed of lists of * times and related values, and which are used to animate a specific property * of an object. */ class KeyframeTrack { /** * Constructs a new keyframe track. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type. */ constructor( name, times, values, interpolation ) { if ( name === undefined ) throw new Error( 'THREE.KeyframeTrack: track name is undefined' ); if ( times === undefined || times.length === 0 ) throw new Error( 'THREE.KeyframeTrack: no keyframes in track named ' + name ); /** * The track's name can refer to morph targets or bones or * possibly other values within an animated object. See {@link PropertyBinding#parseTrackName} * for the forms of strings that can be parsed for property binding. * * @type {string} */ this.name = name; /** * The keyframe times. * * @type {Float32Array} */ this.times = convertArray( times, this.TimeBufferType ); /** * The keyframe values. * * @type {Float32Array} */ this.values = convertArray( values, this.ValueBufferType ); this.setInterpolation( interpolation || this.DefaultInterpolation ); } /** * Converts the keyframe track to JSON. * * @static * @param {KeyframeTrack} track - The keyframe track to serialize. * @return {Object} The serialized keyframe track as JSON.
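 *
 * For example (a hypothetical opacity track):
 * ```js
 * const track = new THREE.NumberKeyframeTrack( '.material.opacity', [ 0, 1, 2 ], [ 1, 0, 1 ] );
 *
 * const json = THREE.KeyframeTrack.toJSON( track );
 * // json.type   -> 'number'
 * // json.times  -> [ 0, 1, 2 ]
 * // json.values -> [ 1, 0, 1 ]
 * ```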
*/ static toJSON( track ) { const trackType = track.constructor; let json; // derived classes can define a static toJSON method if ( trackType.toJSON !== this.toJSON ) { json = trackType.toJSON( track ); } else { // by default, we assume the data can be serialized as-is json = { 'name': track.name, 'times': convertArray( track.times, Array ), 'values': convertArray( track.values, Array ) }; const interpolation = track.getInterpolation(); if ( interpolation !== track.DefaultInterpolation ) { json.interpolation = interpolation; } } json.type = track.ValueTypeName; // mandatory return json; } /** * Factory method for creating a new discrete interpolant. * * @static * @param {TypedArray} [result] - The result buffer. * @return {DiscreteInterpolant} The new interpolant. */ InterpolantFactoryMethodDiscrete( result ) { return new DiscreteInterpolant( this.times, this.values, this.getValueSize(), result ); } /** * Factory method for creating a new linear interpolant. * * @static * @param {TypedArray} [result] - The result buffer. * @return {LinearInterpolant} The new interpolant. */ InterpolantFactoryMethodLinear( result ) { return new LinearInterpolant( this.times, this.values, this.getValueSize(), result ); } /** * Factory method for creating a new smooth interpolant. * * @static * @param {TypedArray} [result] - The result buffer. * @return {CubicInterpolant} The new interpolant. */ InterpolantFactoryMethodSmooth( result ) { return new CubicInterpolant( this.times, this.values, this.getValueSize(), result ); } /** * Defines the interpolation factor method for this keyframe track. * * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} interpolation - The interpolation type. * @return {KeyframeTrack} A reference to this keyframe track. */ setInterpolation( interpolation ) { let factoryMethod; switch ( interpolation ) { case InterpolateDiscrete: factoryMethod = this.InterpolantFactoryMethodDiscrete; break; case InterpolateLinear: factoryMethod = this.InterpolantFactoryMethodLinear; break; case InterpolateSmooth: factoryMethod = this.InterpolantFactoryMethodSmooth; break; } if ( factoryMethod === undefined ) { const message = 'unsupported interpolation for ' + this.ValueTypeName + ' keyframe track named ' + this.name; if ( this.createInterpolant === undefined ) { // fall back to default, unless the default itself is messed up if ( interpolation !== this.DefaultInterpolation ) { this.setInterpolation( this.DefaultInterpolation ); } else { throw new Error( message ); // fatal, in this case } } console.warn( 'THREE.KeyframeTrack:', message ); return this; } this.createInterpolant = factoryMethod; return this; } /** * Returns the current interpolation type. * * @return {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} The interpolation type. */ getInterpolation() { switch ( this.createInterpolant ) { case this.InterpolantFactoryMethodDiscrete: return InterpolateDiscrete; case this.InterpolantFactoryMethodLinear: return InterpolateLinear; case this.InterpolantFactoryMethodSmooth: return InterpolateSmooth; } } /** * Returns the value size. * * @return {number} The value size. */ getValueSize() { return this.values.length / this.times.length; } /** * Moves all keyframes either forward or backward in time. * * @param {number} timeOffset - The offset to move the time values. * @return {KeyframeTrack} A reference to this keyframe track. 
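 *
 * A small sketch combining `shift()` and `scale()` (assuming `track` is an
 * existing keyframe track whose times were authored in frames at 30 fps):
 * ```js
 * track.scale( 1 / 30 ); // convert frame indices to seconds
 * track.shift( 0.5 );    // delay the whole track by half a second
 * ```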
 */ shift( timeOffset ) { if ( timeOffset !== 0.0 ) { const times = this.times; for ( let i = 0, n = times.length; i !== n; ++ i ) { times[ i ] += timeOffset; } } return this; } /** * Scales all keyframe times by a factor (useful for converting between frames and seconds). * * @param {number} timeScale - The time scale. * @return {KeyframeTrack} A reference to this keyframe track. */ scale( timeScale ) { if ( timeScale !== 1.0 ) { const times = this.times; for ( let i = 0, n = times.length; i !== n; ++ i ) { times[ i ] *= timeScale; } } return this; } /** * Removes keyframes before `startTime` and after `endTime`, without changing any * values within that range. * * Note: The method does not shift around keys to the start of the track time, because for interpolated * keys this will change their values. * * @param {number} startTime - The start time. * @param {number} endTime - The end time. * @return {KeyframeTrack} A reference to this keyframe track. */ trim( startTime, endTime ) { const times = this.times, nKeys = times.length; let from = 0, to = nKeys - 1; while ( from !== nKeys && times[ from ] < startTime ) { ++ from; } while ( to !== -1 && times[ to ] > endTime ) { -- to; } ++ to; // inclusive -> exclusive bound if ( from !== 0 || to !== nKeys ) { // empty tracks are forbidden, so keep at least one keyframe if ( from >= to ) { to = Math.max( to, 1 ); from = to - 1; } const stride = this.getValueSize(); this.times = times.slice( from, to ); this.values = this.values.slice( from * stride, to * stride ); } return this; } /** * Performs minimal validation on the keyframe track. Returns `true` if the values * are valid. * * @return {boolean} Whether the keyframes are valid or not. */ validate() { let valid = true; const valueSize = this.getValueSize(); if ( valueSize - Math.floor( valueSize ) !== 0 ) { console.error( 'THREE.KeyframeTrack: Invalid value size in track.', this ); valid = false; } const times = this.times, values = this.values, nKeys = times.length; if ( nKeys === 0 ) { console.error( 'THREE.KeyframeTrack: Track is empty.', this ); valid = false; } let prevTime = null; for ( let i = 0; i !== nKeys; i ++ ) { const currTime = times[ i ]; if ( typeof currTime === 'number' && isNaN( currTime ) ) { console.error( 'THREE.KeyframeTrack: Time is not a valid number.', this, i, currTime ); valid = false; break; } if ( prevTime !== null && prevTime > currTime ) { console.error( 'THREE.KeyframeTrack: Out of order keys.', this, i, currTime, prevTime ); valid = false; break; } prevTime = currTime; } if ( values !== undefined ) { if ( isTypedArray( values ) ) { for ( let i = 0, n = values.length; i !== n; ++ i ) { const value = values[ i ]; if ( isNaN( value ) ) { console.error( 'THREE.KeyframeTrack: Value is not a valid number.', this, i, value ); valid = false; break; } } } } return valid; } /** * Optimizes this keyframe track by removing equivalent sequential keys (which are * common in morph target sequences). * * @return {KeyframeTrack} A reference to this keyframe track.
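 *
 * For example (a hypothetical morph target track with redundant keys):
 * ```js
 * const track = new THREE.NumberKeyframeTrack( '.morphTargetInfluences[0]',
 * 	[ 0, 1, 2, 3, 4, 5 ], [ 0, 0, 0, 1, 0, 0 ] );
 *
 * track.optimize();
 * // track.times -> [ 0, 2, 3, 4, 5 ] (the keyframe at t = 1 added no information)
 * ```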
*/ optimize() { // (0,0,0,0,1,1,1,0,0,0,0,0,0,0) --> (0,0,1,1,0,0) // times or values may be shared with other tracks, so overwriting is unsafe const times = this.times.slice(), values = this.values.slice(), stride = this.getValueSize(), smoothInterpolation = this.getInterpolation() === InterpolateSmooth, lastIndex = times.length - 1; let writeIndex = 1; for ( let i = 1; i < lastIndex; ++ i ) { let keep = false; const time = times[ i ]; const timeNext = times[ i + 1 ]; // remove adjacent keyframes scheduled at the same time if ( time !== timeNext && ( i !== 1 || time !== times[ 0 ] ) ) { if ( ! smoothInterpolation ) { // remove unnecessary keyframes same as their neighbors const offset = i * stride, offsetP = offset - stride, offsetN = offset + stride; for ( let j = 0; j !== stride; ++ j ) { const value = values[ offset + j ]; if ( value !== values[ offsetP + j ] || value !== values[ offsetN + j ] ) { keep = true; break; } } } else { keep = true; } } // in-place compaction if ( keep ) { if ( i !== writeIndex ) { times[ writeIndex ] = times[ i ]; const readOffset = i * stride, writeOffset = writeIndex * stride; for ( let j = 0; j !== stride; ++ j ) { values[ writeOffset + j ] = values[ readOffset + j ]; } } ++ writeIndex; } } // flush last keyframe (compaction looks ahead) if ( lastIndex > 0 ) { times[ writeIndex ] = times[ lastIndex ]; for ( let readOffset = lastIndex * stride, writeOffset = writeIndex * stride, j = 0; j !== stride; ++ j ) { values[ writeOffset + j ] = values[ readOffset + j ]; } ++ writeIndex; } if ( writeIndex !== times.length ) { this.times = times.slice( 0, writeIndex ); this.values = values.slice( 0, writeIndex * stride ); } else { this.times = times; this.values = values; } return this; } /** * Returns a new keyframe track with copied values from this instance. * * @return {KeyframeTrack} A clone of this instance. */ clone() { const times = this.times.slice(); const values = this.values.slice(); const TypedKeyframeTrack = this.constructor; const track = new TypedKeyframeTrack( this.name, times, values ); // Interpolant argument to constructor is not saved, so copy the factory method directly. track.createInterpolant = this.createInterpolant; return track; } } /** * The value type name. * * @type {String} * @default '' */ KeyframeTrack.prototype.ValueTypeName = ''; /** * The time buffer type of this keyframe track. * * @type {TypedArray|Array} * @default Float32Array.constructor */ KeyframeTrack.prototype.TimeBufferType = Float32Array; /** * The value buffer type of this keyframe track. * * @type {TypedArray|Array} * @default Float32Array.constructor */ KeyframeTrack.prototype.ValueBufferType = Float32Array; /** * The default interpolation type of this keyframe track. * * @type {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} * @default InterpolateLinear */ KeyframeTrack.prototype.DefaultInterpolation = InterpolateLinear; /** * A track for boolean keyframe values. * * @augments KeyframeTrack */ class BooleanKeyframeTrack extends KeyframeTrack { /** * Constructs a new boolean keyframe track. * * This keyframe track type has no `interpolation` parameter because the * interpolation is always discrete. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. */ constructor( name, times, values ) { super( name, times, values ); } } /** * The value type name. 
* * @type {String} * @default 'bool' */ BooleanKeyframeTrack.prototype.ValueTypeName = 'bool'; /** * The value buffer type of this keyframe track. * * @type {TypedArray|Array} * @default Array.constructor */ BooleanKeyframeTrack.prototype.ValueBufferType = Array; /** * The default interpolation type of this keyframe track. * * @type {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} * @default InterpolateDiscrete */ BooleanKeyframeTrack.prototype.DefaultInterpolation = InterpolateDiscrete; BooleanKeyframeTrack.prototype.InterpolantFactoryMethodLinear = undefined; BooleanKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined; /** * A track for color keyframe values. * * @augments KeyframeTrack */ class ColorKeyframeTrack extends KeyframeTrack { /** * Constructs a new color keyframe track. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type. */ constructor( name, times, values, interpolation ) { super( name, times, values, interpolation ); } } /** * The value type name. * * @type {String} * @default 'color' */ ColorKeyframeTrack.prototype.ValueTypeName = 'color'; /** * A track for numeric keyframe values. * * @augments KeyframeTrack */ class NumberKeyframeTrack extends KeyframeTrack { /** * Constructs a new number keyframe track. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type. */ constructor( name, times, values, interpolation ) { super( name, times, values, interpolation ); } } /** * The value type name. * * @type {String} * @default 'number' */ NumberKeyframeTrack.prototype.ValueTypeName = 'number'; /** * Spherical linear unit quaternion interpolant. * * @augments Interpolant */ class QuaternionLinearInterpolant extends Interpolant { /** * Constructs a new SLERP interpolant. * * @param {TypedArray} parameterPositions - The parameter positions hold the interpolation factors. * @param {TypedArray} sampleValues - The sample values. * @param {number} sampleSize - The sample size * @param {TypedArray} [resultBuffer] - The result buffer. */ constructor( parameterPositions, sampleValues, sampleSize, resultBuffer ) { super( parameterPositions, sampleValues, sampleSize, resultBuffer ); } interpolate_( i1, t0, t, t1 ) { const result = this.resultBuffer, values = this.sampleValues, stride = this.valueSize, alpha = ( t - t0 ) / ( t1 - t0 ); let offset = i1 * stride; for ( let end = offset + stride; offset !== end; offset += 4 ) { Quaternion.slerpFlat( result, 0, values, offset - stride, values, offset, alpha ); } return result; } } /** * A track for Quaternion keyframe values. * * @augments KeyframeTrack */ class QuaternionKeyframeTrack extends KeyframeTrack { /** * Constructs a new Quaternion keyframe track. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type. */ constructor( name, times, values, interpolation ) { super( name, times, values, interpolation ); } /** * Overwritten so the method returns Quaternion based interpolant. 
* * @static * @param {TypedArray} [result] - The result buffer. * @return {QuaternionLinearInterpolant} The new interpolant. */ InterpolantFactoryMethodLinear( result ) { return new QuaternionLinearInterpolant( this.times, this.values, this.getValueSize(), result ); } } /** * The value type name. * * @type {String} * @default 'quaternion' */ QuaternionKeyframeTrack.prototype.ValueTypeName = 'quaternion'; // ValueBufferType is inherited // DefaultInterpolation is inherited; QuaternionKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined; /** * A track for string keyframe values. * * @augments KeyframeTrack */ class StringKeyframeTrack extends KeyframeTrack { /** * Constructs a new string keyframe track. * * This keyframe track type has no `interpolation` parameter because the * interpolation is always discrete. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. */ constructor( name, times, values ) { super( name, times, values ); } } /** * The value type name. * * @type {String} * @default 'string' */ StringKeyframeTrack.prototype.ValueTypeName = 'string'; /** * The value buffer type of this keyframe track. * * @type {TypedArray|Array} * @default Array.constructor */ StringKeyframeTrack.prototype.ValueBufferType = Array; /** * The default interpolation type of this keyframe track. * * @type {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} * @default InterpolateDiscrete */ StringKeyframeTrack.prototype.DefaultInterpolation = InterpolateDiscrete; StringKeyframeTrack.prototype.InterpolantFactoryMethodLinear = undefined; StringKeyframeTrack.prototype.InterpolantFactoryMethodSmooth = undefined; /** * A track for vector keyframe values. * * @augments KeyframeTrack */ class VectorKeyframeTrack extends KeyframeTrack { /** * Constructs a new vector keyframe track. * * @param {string} name - The keyframe track's name. * @param {Array} times - A list of keyframe times. * @param {Array} values - A list of keyframe values. * @param {(InterpolateLinear|InterpolateDiscrete|InterpolateSmooth)} [interpolation] - The interpolation type. */ constructor( name, times, values, interpolation ) { super( name, times, values, interpolation ); } } /** * The value type name. * * @type {String} * @default 'vector' */ VectorKeyframeTrack.prototype.ValueTypeName = 'vector'; /** * A reusable set of keyframe tracks which represent an animation. */ class AnimationClip { /** * Constructs a new animation clip. * * Note: Instead of instantiating an AnimationClip directly with the constructor, you can * use the static interface of this class for creating clips. In most cases though, animation clips * will automatically be created by loaders when importing animated 3D assets. * * @param {string} [name=''] - The clip's name. * @param {number} [duration=-1] - The clip's duration in seconds. If a negative value is passed, * the duration will be calculated from the passed keyframes. * @param {Array} tracks - An array of keyframe tracks. * @param {(NormalAnimationBlendMode|AdditiveAnimationBlendMode)} [blendMode=NormalAnimationBlendMode] - Defines how the animation * is blended/combined when two or more animations are simultaneously played. */ constructor( name = '', duration = -1, tracks = [], blendMode = NormalAnimationBlendMode ) { /** * The clip's name. * * @type {string} */ this.name = name; /** * An array of keyframe tracks. 
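 *
 * For example, a clip can be assembled from tracks directly (a minimal,
 * hypothetical fade animation):
 * ```js
 * const opacityKF = new THREE.NumberKeyframeTrack( '.material.opacity', [ 0, 0.5, 1 ], [ 1, 0, 1 ] );
 *
 * // Passing -1 as the duration derives it from the longest track.
 * const clip = new THREE.AnimationClip( 'fade', -1, [ opacityKF ] );
 * ```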
* * @type {Array} */ this.tracks = tracks; /** * The clip's duration in seconds. * * @type {number} */ this.duration = duration; /** * Defines how the animation is blended/combined when two or more animations * are simultaneously played. * * @type {(NormalAnimationBlendMode|AdditiveAnimationBlendMode)} */ this.blendMode = blendMode; /** * The UUID of the animation clip. * * @type {string} * @readonly */ this.uuid = generateUUID(); // this means it should figure out its duration by scanning the tracks if ( this.duration < 0 ) { this.resetDuration(); } } /** * Factory method for creating an animation clip from the given JSON. * * @static * @param {Object} json - The serialized animation clip. * @return {AnimationClip} The new animation clip. */ static parse( json ) { const tracks = [], jsonTracks = json.tracks, frameTime = 1.0 / ( json.fps || 1.0 ); for ( let i = 0, n = jsonTracks.length; i !== n; ++ i ) { tracks.push( parseKeyframeTrack( jsonTracks[ i ] ).scale( frameTime ) ); } const clip = new this( json.name, json.duration, tracks, json.blendMode ); clip.uuid = json.uuid; return clip; } /** * Serializes the given animation clip into JSON. * * @static * @param {AnimationClip} clip - The animation clip to serialize. * @return {Object} The JSON object. */ static toJSON( clip ) { const tracks = [], clipTracks = clip.tracks; const json = { 'name': clip.name, 'duration': clip.duration, 'tracks': tracks, 'uuid': clip.uuid, 'blendMode': clip.blendMode }; for ( let i = 0, n = clipTracks.length; i !== n; ++ i ) { tracks.push( KeyframeTrack.toJSON( clipTracks[ i ] ) ); } return json; } /** * Returns a new animation clip from the passed morph targets array of a * geometry, taking a name and the number of frames per second. * * Note: The fps parameter is required, but the animation speed can be * overridden via {@link AnimationAction#setDuration}. * * @static * @param {string} name - The name of the animation clip. * @param {Array} morphTargetSequence - A sequence of morph targets. * @param {number} fps - The Frames-Per-Second value. * @param {boolean} noLoop - Whether the clip should be no loop or not. * @return {AnimationClip} The new animation clip. */ static CreateFromMorphTargetSequence( name, morphTargetSequence, fps, noLoop ) { const numMorphTargets = morphTargetSequence.length; const tracks = []; for ( let i = 0; i < numMorphTargets; i ++ ) { let times = []; let values = []; times.push( ( i + numMorphTargets - 1 ) % numMorphTargets, i, ( i + 1 ) % numMorphTargets ); values.push( 0, 1, 0 ); const order = getKeyframeOrder( times ); times = sortedArray( times, 1, order ); values = sortedArray( values, 1, order ); // if there is a key at the first frame, duplicate it as the // last frame as well for perfect loop. if ( ! noLoop && times[ 0 ] === 0 ) { times.push( numMorphTargets ); values.push( values[ 0 ] ); } tracks.push( new NumberKeyframeTrack( '.morphTargetInfluences[' + morphTargetSequence[ i ].name + ']', times, values ).scale( 1.0 / fps ) ); } return new this( name, -1, tracks ); } /** * Searches for an animation clip by name, taking as its first parameter * either an array of clips, or a mesh or geometry that contains an * array named "animations" property. * * @static * @param {(Array|Object3D)} objectOrClipArray - The array or object to search through. * @param {string} name - The name to search for. * @return {?AnimationClip} The found animation clip. Returns `null` if no clip has been found. */ static findByName( objectOrClipArray, name ) { let clipArray = objectOrClipArray; if ( ! 
Array.isArray( objectOrClipArray ) ) { const o = objectOrClipArray; clipArray = o.geometry && o.geometry.animations || o.animations; } for ( let i = 0; i < clipArray.length; i ++ ) { if ( clipArray[ i ].name === name ) { return clipArray[ i ]; } } return null; } /** * Returns an array of new AnimationClips created from the morph target * sequences of a geometry, trying to sort morph target names into * animation-group-based patterns like "Walk_001, Walk_002, Run_001, Run_002...". * * See {@link MD2Loader#parse} as an example for how the method should be used. * * @static * @param {Array} morphTargets - A sequence of morph targets. * @param {number} fps - The Frames-Per-Second value. * @param {boolean} noLoop - Whether the clip should be non-looping or not. * @return {Array} An array of new animation clips. */ static CreateClipsFromMorphTargetSequences( morphTargets, fps, noLoop ) { const animationToMorphTargets = {}; // tested with https://regex101.com/ on trick sequences // such as flamingo_flyA_003, flamingo_run1_003, crdeath0059 const pattern = /^([\w-]*?)([\d]+)$/; // sort morph target names into animation groups based on // patterns like Walk_001, Walk_002, Run_001, Run_002 for ( let i = 0, il = morphTargets.length; i < il; i ++ ) { const morphTarget = morphTargets[ i ]; const parts = morphTarget.name.match( pattern ); if ( parts && parts.length > 1 ) { const name = parts[ 1 ]; let animationMorphTargets = animationToMorphTargets[ name ]; if ( ! animationMorphTargets ) { animationToMorphTargets[ name ] = animationMorphTargets = []; } animationMorphTargets.push( morphTarget ); } } const clips = []; for ( const name in animationToMorphTargets ) { clips.push( this.CreateFromMorphTargetSequence( name, animationToMorphTargets[ name ], fps, noLoop ) ); } return clips; } /** * Parses the `animation.hierarchy` format and returns a new animation clip. * * @static * @deprecated since r175. * @param {Object} animation - A serialized animation clip as JSON. * @param {Array} bones - An array of bones. * @return {?AnimationClip} The new animation clip. */ static parseAnimation( animation, bones ) { console.warn( 'THREE.AnimationClip: parseAnimation() is deprecated and will be removed with r185' ); if ( ! animation ) { console.error( 'THREE.AnimationClip: No animation in JSONLoader data.' ); return null; } const addNonemptyTrack = function ( trackType, trackName, animationKeys, propertyName, destTracks ) { // only return track if there are actually keys. if ( animationKeys.length !== 0 ) { const times = []; const values = []; flattenJSON( animationKeys, times, values, propertyName ); // empty keys are filtered out, so check again if ( times.length !== 0 ) { destTracks.push( new trackType( trackName, times, values ) ); } } }; const tracks = []; const clipName = animation.name || 'default'; const fps = animation.fps || 30; const blendMode = animation.blendMode; // automatic length determination in AnimationClip. let duration = animation.length || -1; const hierarchyTracks = animation.hierarchy || []; for ( let h = 0; h < hierarchyTracks.length; h ++ ) { const animationKeys = hierarchyTracks[ h ].keys; // skip empty tracks if ( !
animationKeys || animationKeys.length === 0 ) continue; // process morph targets if ( animationKeys[ 0 ].morphTargets ) { // figure out all morph targets used in this track const morphTargetNames = {}; let k; for ( k = 0; k < animationKeys.length; k ++ ) { if ( animationKeys[ k ].morphTargets ) { for ( let m = 0; m < animationKeys[ k ].morphTargets.length; m ++ ) { morphTargetNames[ animationKeys[ k ].morphTargets[ m ] ] = -1; } } } // create a track for each morph target with all zero // morphTargetInfluences except for the keys in which // the morphTarget is named. for ( const morphTargetName in morphTargetNames ) { const times = []; const values = []; for ( let m = 0; m !== animationKeys[ k ].morphTargets.length; ++ m ) { const animationKey = animationKeys[ k ]; times.push( animationKey.time ); values.push( ( animationKey.morphTarget === morphTargetName ) ? 1 : 0 ); } tracks.push( new NumberKeyframeTrack( '.morphTargetInfluence[' + morphTargetName + ']', times, values ) ); } duration = morphTargetNames.length * fps; } else { // ...assume skeletal animation const boneName = '.bones[' + bones[ h ].name + ']'; addNonemptyTrack( VectorKeyframeTrack, boneName + '.position', animationKeys, 'pos', tracks ); addNonemptyTrack( QuaternionKeyframeTrack, boneName + '.quaternion', animationKeys, 'rot', tracks ); addNonemptyTrack( VectorKeyframeTrack, boneName + '.scale', animationKeys, 'scl', tracks ); } } if ( tracks.length === 0 ) { return null; } const clip = new this( clipName, duration, tracks, blendMode ); return clip; } /** * Sets the duration of this clip to the duration of its longest keyframe track. * * @return {AnimationClip} A reference to this animation clip. */ resetDuration() { const tracks = this.tracks; let duration = 0; for ( let i = 0, n = tracks.length; i !== n; ++ i ) { const track = this.tracks[ i ]; duration = Math.max( duration, track.times[ track.times.length - 1 ] ); } this.duration = duration; return this; } /** * Trims all tracks to the clip's duration. * * @return {AnimationClip} A reference to this animation clip. */ trim() { for ( let i = 0; i < this.tracks.length; i ++ ) { this.tracks[ i ].trim( 0, this.duration ); } return this; } /** * Performs minimal validation on each track in the clip. Returns `true` if all * tracks are valid. * * @return {boolean} Whether the clip's keyframes are valid or not. */ validate() { let valid = true; for ( let i = 0; i < this.tracks.length; i ++ ) { valid = valid && this.tracks[ i ].validate(); } return valid; } /** * Optimizes each track by removing equivalent sequential keys (which are * common in morph target sequences). * * @return {AnimationClip} A reference to this animation clip. */ optimize() { for ( let i = 0; i < this.tracks.length; i ++ ) { this.tracks[ i ].optimize(); } return this; } /** * Returns a new animation clip with copied values from this instance. * * @return {AnimationClip} A clone of this instance. */ clone() { const tracks = []; for ( let i = 0; i < this.tracks.length; i ++ ) { tracks.push( this.tracks[ i ].clone() ); } return new this.constructor( this.name, this.duration, tracks, this.blendMode ); } /** * Serializes this animation clip into JSON. * * @return {Object} The JSON object. 
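 *
 * A round-trip sketch (assuming an existing `clip`):
 * ```js
 * const json = clip.toJSON();
 * const restored = THREE.AnimationClip.parse( json ); // same track data and uuid
 * ```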
*/ toJSON() { return this.constructor.toJSON( this ); } } function getTrackTypeForValueTypeName( typeName ) { switch ( typeName.toLowerCase() ) { case 'scalar': case 'double': case 'float': case 'number': case 'integer': return NumberKeyframeTrack; case 'vector': case 'vector2': case 'vector3': case 'vector4': return VectorKeyframeTrack; case 'color': return ColorKeyframeTrack; case 'quaternion': return QuaternionKeyframeTrack; case 'bool': case 'boolean': return BooleanKeyframeTrack; case 'string': return StringKeyframeTrack; } throw new Error( 'THREE.KeyframeTrack: Unsupported typeName: ' + typeName ); } function parseKeyframeTrack( json ) { if ( json.type === undefined ) { throw new Error( 'THREE.KeyframeTrack: track type undefined, can not parse' ); } const trackType = getTrackTypeForValueTypeName( json.type ); if ( json.times === undefined ) { const times = [], values = []; flattenJSON( json.keys, times, values, 'value' ); json.times = times; json.values = values; } // derived classes can define a static parse method if ( trackType.parse !== undefined ) { return trackType.parse( json ); } else { // by default, we assume a constructor compatible with the base return new trackType( json.name, json.times, json.values, json.interpolation ); } } /** * @class * @classdesc A simple caching system, used internally by {@link FileLoader}. * To enable caching across all loaders that use {@link FileLoader}, add `THREE.Cache.enabled = true.` once in your app. * @hideconstructor */ const Cache = { /** * Whether caching is enabled or not. * * @static * @type {boolean} * @default false */ enabled: false, /** * A dictionary that holds cached files. * * @static * @type {Object} */ files: {}, /** * Adds a cache entry with a key to reference the file. If this key already * holds a file, it is overwritten. * * @static * @param {string} key - The key to reference the cached file. * @param {Object} file - The file to be cached. */ add: function ( key, file ) { if ( this.enabled === false ) return; // console.log( 'THREE.Cache', 'Adding key:', key ); this.files[ key ] = file; }, /** * Gets the cached value for the given key. * * @static * @param {string} key - The key to reference the cached file. * @return {Object|undefined} The cached file. If the key does not exist `undefined` is returned. */ get: function ( key ) { if ( this.enabled === false ) return; // console.log( 'THREE.Cache', 'Checking key:', key ); return this.files[ key ]; }, /** * Removes the cached file associated with the given key. * * @static * @param {string} key - The key to reference the cached file. */ remove: function ( key ) { delete this.files[ key ]; }, /** * Remove all values from the cache. * * @static */ clear: function () { this.files = {}; } }; /** * Handles and keeps track of loaded and pending data. A default global * instance of this class is created and used by loaders if not supplied * manually. * * In general that should be sufficient, however there are times when it can * be useful to have separate loaders - for example if you want to show * separate loading bars for objects and textures. * * ```js * const manager = new THREE.LoadingManager(); * manager.onLoad = () => console.log( 'Loading complete!' ); * * const loader1 = new OBJLoader( manager ); * const loader2 = new ColladaLoader( manager ); * ``` */ class LoadingManager { /** * Constructs a new loading manager. * * @param {Function} [onLoad] - Executes when all items have been loaded. * @param {Function} [onProgress] - Executes when single items have been loaded. 
* @param {Function} [onError] - Executes when an error occurs. */ constructor( onLoad, onProgress, onError ) { const scope = this; let isLoading = false; let itemsLoaded = 0; let itemsTotal = 0; let urlModifier = undefined; const handlers = []; // Refer to #5689 for the reason why we don't set .onStart // in the constructor /** * Executes when an item starts loading. * * @type {Function|undefined} * @default undefined */ this.onStart = undefined; /** * Executes when all items have been loaded. * * @type {Function|undefined} * @default undefined */ this.onLoad = onLoad; /** * Executes when single items have been loaded. * * @type {Function|undefined} * @default undefined */ this.onProgress = onProgress; /** * Executes when an error occurs. * * @type {Function|undefined} * @default undefined */ this.onError = onError; /** * This should be called by any loader using the manager when the loader * starts loading an item. * * @param {string} url - The URL to load. */ this.itemStart = function ( url ) { itemsTotal ++; if ( isLoading === false ) { if ( scope.onStart !== undefined ) { scope.onStart( url, itemsLoaded, itemsTotal ); } } isLoading = true; }; /** * This should be called by any loader using the manager when the loader * ended loading an item. * * @param {string} url - The URL of the loaded item. */ this.itemEnd = function ( url ) { itemsLoaded ++; if ( scope.onProgress !== undefined ) { scope.onProgress( url, itemsLoaded, itemsTotal ); } if ( itemsLoaded === itemsTotal ) { isLoading = false; if ( scope.onLoad !== undefined ) { scope.onLoad(); } } }; /** * This should be called by any loader using the manager when the loader * encounters an error when loading an item. * * @param {string} url - The URL of the item that produces an error. */ this.itemError = function ( url ) { if ( scope.onError !== undefined ) { scope.onError( url ); } }; /** * Given a URL, uses the URL modifier callback (if any) and returns a * resolved URL. If no URL modifier is set, returns the original URL. * * @param {string} url - The URL to load. * @return {string} The resolved URL. */ this.resolveURL = function ( url ) { if ( urlModifier ) { return urlModifier( url ); } return url; }; /** * If provided, the callback will be passed each resource URL before a * request is sent. The callback may return the original URL, or a new URL to * override loading behavior. This behavior can be used to load assets from * .ZIP files, drag-and-drop APIs, and Data URIs. * * ```js * const blobs = {'fish.gltf': blob1, 'diffuse.png': blob2, 'normal.png': blob3}; * * const manager = new THREE.LoadingManager(); * * // Initialize loading manager with URL callback. * const objectURLs = []; * manager.setURLModifier( ( url ) => { * * url = URL.createObjectURL( blobs[ url ] ); * objectURLs.push( url ); * return url; * * } ); * * // Load as usual, then revoke the blob URLs. * const loader = new GLTFLoader( manager ); * loader.load( 'fish.gltf', (gltf) => { * * scene.add( gltf.scene ); * objectURLs.forEach( ( url ) => URL.revokeObjectURL( url ) ); * * } ); * ``` * * @param {function(string):string} transform - URL modifier callback. Called with an URL and must return a resolved URL. * @return {LoadingManager} A reference to this loading manager. */ this.setURLModifier = function ( transform ) { urlModifier = transform; return this; }; /** * Registers a loader with the given regular expression. Can be used to * define what loader should be used in order to load specific files. 
A * typical use case is to overwrite the default loader for textures. * * ```js * // add handler for TGA textures * manager.addHandler( /\.tga$/i, new TGALoader() ); * ``` * * @param {string} regex - A regular expression. * @param {Loader} loader - A loader that should handle matched cases. * @return {LoadingManager} A reference to this loading manager. */ this.addHandler = function ( regex, loader ) { handlers.push( regex, loader ); return this; }; /** * Removes the loader for the given regular expression. * * @param {string} regex - A regular expression. * @return {LoadingManager} A reference to this loading manager. */ this.removeHandler = function ( regex ) { const index = handlers.indexOf( regex ); if ( index !== -1 ) { handlers.splice( index, 2 ); } return this; }; /** * Can be used to retrieve the registered loader for the given file path. * * @param {string} file - The file path. * @return {?Loader} The registered loader. Returns `null` if no loader was found. */ this.getHandler = function ( file ) { for ( let i = 0, l = handlers.length; i < l; i += 2 ) { const regex = handlers[ i ]; const loader = handlers[ i + 1 ]; if ( regex.global ) regex.lastIndex = 0; // see #17920 if ( regex.test( file ) ) { return loader; } } return null; }; } } /** * The global default loading manager. * * @constant * @type {LoadingManager} */ const DefaultLoadingManager = /*@__PURE__*/ new LoadingManager(); /** * Abstract base class for loaders. * * @abstract */ class Loader { /** * Constructs a new loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { /** * The loading manager. * * @type {LoadingManager} * @default DefaultLoadingManager */ this.manager = ( manager !== undefined ) ? manager : DefaultLoadingManager; /** * The crossOrigin string to implement CORS for loading the url from a * different domain that allows CORS. * * @type {string} * @default 'anonymous' */ this.crossOrigin = 'anonymous'; /** * Whether the XMLHttpRequest uses credentials. * * @type {boolean} * @default false */ this.withCredentials = false; /** * The base path from which the asset will be loaded. * * @type {string} */ this.path = ''; /** * The base path from which additional resources like textures will be loaded. * * @type {string} */ this.resourcePath = ''; /** * The [request header]{@link https://developer.mozilla.org/en-US/docs/Glossary/Request_header} * used in HTTP request. * * @type {Object} */ this.requestHeader = {}; } /** * This method needs to be implemented by all concrete loaders. It holds the * logic for loading assets from the backend. * * @param {string} url - The path/URL of the file to be loaded. * @param {Function} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} [onProgress] - Executed while the loading is in progress. * @param {onErrorCallback} [onError] - Executed when errors occur. */ load( /* url, onLoad, onProgress, onError */ ) {} /** * A async version of {@link Loader#load}. * * @param {string} url - The path/URL of the file to be loaded. * @param {onProgressCallback} [onProgress] - Executed while the loading is in progress. * @return {Promise} A Promise that resolves when the asset has been loaded. */ loadAsync( url, onProgress ) { const scope = this; return new Promise( function ( resolve, reject ) { scope.load( url, resolve, onProgress, reject ); } ); } /** * This method needs to be implemented by all concrete loaders. It holds the * logic for parsing the asset into three.js entities. 
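 *
 * A minimal sketch of how a concrete loader typically wires `load()` and `parse()`
 * together (`MyFormatLoader` is an illustrative name, not part of the core API):
 *
 * ```js
 * class MyFormatLoader extends THREE.Loader {
 *
 * 	load( url, onLoad, onProgress, onError ) {
 *
 * 		const loader = new THREE.FileLoader( this.manager );
 * 		loader.setPath( this.path );
 * 		loader.setRequestHeader( this.requestHeader );
 * 		loader.setWithCredentials( this.withCredentials );
 * 		loader.load( url, ( data ) => onLoad( this.parse( data ) ), onProgress, onError );
 *
 * 	}
 *
 * 	parse( data ) {
 *
 * 		// convert the raw data into three.js entities here
 * 		return data;
 *
 * 	}
 *
 * }
 * ```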
* * @param {any} data - The data to parse. */ parse( /* data */ ) {} /** * Sets the `crossOrigin` String to implement CORS for loading the URL * from a different domain that allows CORS. * * @param {string} crossOrigin - The `crossOrigin` value. * @return {Loader} A reference to this instance. */ setCrossOrigin( crossOrigin ) { this.crossOrigin = crossOrigin; return this; } /** * Whether the XMLHttpRequest uses credentials such as cookies, authorization * headers or TLS client certificates, see [XMLHttpRequest.withCredentials]{@link https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials}. * * Note: This setting has no effect if you are loading files locally or from the same domain. * * @param {boolean} value - The `withCredentials` value. * @return {Loader} A reference to this instance. */ setWithCredentials( value ) { this.withCredentials = value; return this; } /** * Sets the base path for the asset. * * @param {string} path - The base path. * @return {Loader} A reference to this instance. */ setPath( path ) { this.path = path; return this; } /** * Sets the base path for dependent resources like textures. * * @param {string} resourcePath - The resource path. * @return {Loader} A reference to this instance. */ setResourcePath( resourcePath ) { this.resourcePath = resourcePath; return this; } /** * Sets the given request header. * * @param {Object} requestHeader - A [request header]{@link https://developer.mozilla.org/en-US/docs/Glossary/Request_header} * for configuring the HTTP request. * @return {Loader} A reference to this instance. */ setRequestHeader( requestHeader ) { this.requestHeader = requestHeader; return this; } } /** * Callback for onProgress in loaders. * * @callback onProgressCallback * @param {ProgressEvent} event - An instance of `ProgressEvent` that represents the current loading status. */ /** * Callback for onError in loaders. * * @callback onErrorCallback * @param {Error} error - The error which occurred during the loading process. */ /** * The default material name that is used by loaders * when creating materials for loaded 3D objects. * * Note: Not all loaders might honor this setting. * * @static * @type {string} * @default '__DEFAULT' */ Loader.DEFAULT_MATERIAL_NAME = '__DEFAULT'; const loading = {}; class HttpError extends Error { constructor( message, response ) { super( message ); this.response = response; } } /** * A low level class for loading resources with the Fetch API, used internally by * most loaders. It can also be used directly to load any file type that does * not have a loader. * * This loader supports caching. If you want to use it, add `THREE.Cache.enabled = true;` * once to your application. * * ```js * const loader = new THREE.FileLoader(); * const data = await loader.loadAsync( 'example.txt' ); * ``` * * @augments Loader */ class FileLoader extends Loader { /** * Constructs a new file loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); /** * The expected mime type. * * @type {string} */ this.mimeType = ''; /** * The expected response type. * * @type {('arraybuffer'|'blob'|'document'|'json'|'')} * @default '' */ this.responseType = ''; } /** * Starts loading from the given URL and pass the loaded response to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(any)} onLoad - Executed when the loading process has been finished. 
* @param {onProgressCallback} [onProgress] - Executed while the loading is in progress. * @param {onErrorCallback} [onError] - Executed when errors occur. * @return {any|undefined} The cached resource if available. */ load( url, onLoad, onProgress, onError ) { if ( url === undefined ) url = ''; if ( this.path !== undefined ) url = this.path + url; url = this.manager.resolveURL( url ); const cached = Cache.get( url ); if ( cached !== undefined ) { this.manager.itemStart( url ); setTimeout( () => { if ( onLoad ) onLoad( cached ); this.manager.itemEnd( url ); }, 0 ); return cached; } // Check if request is duplicate if ( loading[ url ] !== undefined ) { loading[ url ].push( { onLoad: onLoad, onProgress: onProgress, onError: onError } ); return; } // Initialise array for duplicate requests loading[ url ] = []; loading[ url ].push( { onLoad: onLoad, onProgress: onProgress, onError: onError, } ); // create request const req = new Request( url, { headers: new Headers( this.requestHeader ), credentials: this.withCredentials ? 'include' : 'same-origin', // An abort controller could be added within a future PR } ); // record states ( avoid data race ) const mimeType = this.mimeType; const responseType = this.responseType; // start the fetch fetch( req ) .then( response => { if ( response.status === 200 || response.status === 0 ) { // Some browsers return HTTP Status 0 when using non-http protocol // e.g. 'file://' or 'data://'. Handle as success. if ( response.status === 0 ) { console.warn( 'THREE.FileLoader: HTTP Status 0 received.' ); } // Workaround: Checking if response.body === undefined for Alipay browser #23548 if ( typeof ReadableStream === 'undefined' || response.body === undefined || response.body.getReader === undefined ) { return response; } const callbacks = loading[ url ]; const reader = response.body.getReader(); // Nginx needs X-File-Size check // https://serverfault.com/questions/482875/why-does-nginx-remove-content-length-header-for-chunked-content const contentLength = response.headers.get( 'X-File-Size' ) || response.headers.get( 'Content-Length' ); const total = contentLength ? parseInt( contentLength ) : 0; const lengthComputable = total !== 0; let loaded = 0; // periodically read data into the new stream tracking while download progress const stream = new ReadableStream( { start( controller ) { readData(); function readData() { reader.read().then( ( { done, value } ) => { if ( done ) { controller.close(); } else { loaded += value.byteLength; const event = new ProgressEvent( 'progress', { lengthComputable, loaded, total } ); for ( let i = 0, il = callbacks.length; i < il; i ++ ) { const callback = callbacks[ i ]; if ( callback.onProgress ) callback.onProgress( event ); } controller.enqueue( value ); readData(); } }, ( e ) => { controller.error( e ); } ); } } } ); return new Response( stream ); } else { throw new HttpError( `fetch for "${response.url}" responded with ${response.status}: ${response.statusText}`, response ); } } ) .then( response => { switch ( responseType ) { case 'arraybuffer': return response.arrayBuffer(); case 'blob': return response.blob(); case 'document': return response.text() .then( text => { const parser = new DOMParser(); return parser.parseFromString( text, mimeType ); } ); case 'json': return response.json(); default: if ( mimeType === '' ) { return response.text(); } else { // sniff encoding const re = /charset="?([^;"\s]*)"?/i; const exec = re.exec( mimeType ); const label = exec && exec[ 1 ] ? 
exec[ 1 ].toLowerCase() : undefined; const decoder = new TextDecoder( label ); return response.arrayBuffer().then( ab => decoder.decode( ab ) ); } } } ) .then( data => { // Add to cache only on HTTP success, so that we do not cache // error response bodies as proper responses to requests. Cache.add( url, data ); const callbacks = loading[ url ]; delete loading[ url ]; for ( let i = 0, il = callbacks.length; i < il; i ++ ) { const callback = callbacks[ i ]; if ( callback.onLoad ) callback.onLoad( data ); } } ) .catch( err => { // Abort errors and other errors are handled the same const callbacks = loading[ url ]; if ( callbacks === undefined ) { // When onLoad was called and url was deleted in `loading` this.manager.itemError( url ); throw err; } delete loading[ url ]; for ( let i = 0, il = callbacks.length; i < il; i ++ ) { const callback = callbacks[ i ]; if ( callback.onError ) callback.onError( err ); } this.manager.itemError( url ); } ) .finally( () => { this.manager.itemEnd( url ); } ); this.manager.itemStart( url ); } /** * Sets the expected response type. * * @param {('arraybuffer'|'blob'|'document'|'json'|'')} value - The response type. * @return {FileLoader} A reference to this file loader. */ setResponseType( value ) { this.responseType = value; return this; } /** * Sets the expected mime type of the loaded file. * * @param {string} value - The mime type. * @return {FileLoader} A reference to this file loader. */ setMimeType( value ) { this.mimeType = value; return this; } } /** * Class for loading animation clips in the JSON format. The files are internally * loaded via {@link FileLoader}. * * ```js * const loader = new THREE.AnimationLoader(); * const animations = await loader.loadAsync( 'animations/animation.js' ); * ``` * * @augments Loader */ class AnimationLoader extends Loader { /** * Constructs a new animation loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and pass the loaded animations as an array * holding instances of {@link AnimationClip} to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(Array)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. */ load( url, onLoad, onProgress, onError ) { const scope = this; const loader = new FileLoader( this.manager ); loader.setPath( this.path ); loader.setRequestHeader( this.requestHeader ); loader.setWithCredentials( this.withCredentials ); loader.load( url, function ( text ) { try { onLoad( scope.parse( JSON.parse( text ) ) ); } catch ( e ) { if ( onError ) { onError( e ); } else { console.error( e ); } scope.manager.itemError( url ); } }, onProgress, onError ); } /** * Parses the given JSON object and returns an array of animation clips. * * @param {Object} json - The serialized animation clips. * @return {Array} The parsed animation clips. */ parse( json ) { const animations = []; for ( let i = 0; i < json.length; i ++ ) { const clip = AnimationClip.parse( json[ i ] ); animations.push( clip ); } return animations; } } /** * Abstract base class for loading compressed texture formats S3TC, ASTC or ETC. * Textures are internally loaded via {@link FileLoader}. * * Derived classes have to implement the `parse()` method which holds the parsing * for the respective format. 
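 *
 * As an illustrative sketch, a derived addon loader such as `DDSLoader`
 * (from the examples/addons, not the core library) is typically used like this:
 *
 * ```js
 * const loader = new DDSLoader();
 * const texture = await loader.loadAsync( 'textures/compressed/example.dds' ); // illustrative path
 * material.map = texture; // assign to an existing material
 * ```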
* * @abstract * @augments Loader */ class CompressedTextureLoader extends Loader { /** * Constructs a new compressed texture loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and passes the loaded compressed texture * to the `onLoad()` callback. The method also returns a new texture object which can * directly be used for material creation. If you do it this way, the texture * may pop up in your scene once the respective loading process is finished. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(CompressedTexture)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. * @return {CompressedTexture} The compressed texture. */ load( url, onLoad, onProgress, onError ) { const scope = this; const images = []; const texture = new CompressedTexture(); const loader = new FileLoader( this.manager ); loader.setPath( this.path ); loader.setResponseType( 'arraybuffer' ); loader.setRequestHeader( this.requestHeader ); loader.setWithCredentials( scope.withCredentials ); let loaded = 0; function loadTexture( i ) { loader.load( url[ i ], function ( buffer ) { const texDatas = scope.parse( buffer, true ); images[ i ] = { width: texDatas.width, height: texDatas.height, format: texDatas.format, mipmaps: texDatas.mipmaps }; loaded += 1; if ( loaded === 6 ) { if ( texDatas.mipmapCount === 1 ) texture.minFilter = LinearFilter; texture.image = images; texture.format = texDatas.format; texture.needsUpdate = true; if ( onLoad ) onLoad( texture ); } }, onProgress, onError ); } if ( Array.isArray( url ) ) { for ( let i = 0, il = url.length; i < il; ++ i ) { loadTexture( i ); } } else { // compressed cubemap texture stored in a single DDS file loader.load( url, function ( buffer ) { const texDatas = scope.parse( buffer, true ); if ( texDatas.isCubemap ) { const faces = texDatas.mipmaps.length / texDatas.mipmapCount; for ( let f = 0; f < faces; f ++ ) { images[ f ] = { mipmaps: [] }; for ( let i = 0; i < texDatas.mipmapCount; i ++ ) { images[ f ].mipmaps.push( texDatas.mipmaps[ f * texDatas.mipmapCount + i ] ); images[ f ].format = texDatas.format; images[ f ].width = texDatas.width; images[ f ].height = texDatas.height; } } texture.image = images; } else { texture.image.width = texDatas.width; texture.image.height = texDatas.height; texture.mipmaps = texDatas.mipmaps; } if ( texDatas.mipmapCount === 1 ) { texture.minFilter = LinearFilter; } texture.format = texDatas.format; texture.needsUpdate = true; if ( onLoad ) onLoad( texture ); }, onProgress, onError ); } return texture; } } /** * A loader for loading images. The class loads images with the HTML `Image` API. * * ```js * const loader = new THREE.ImageLoader(); * const image = await loader.loadAsync( 'image.png' ); * ``` * Please note that `ImageLoader` has dropped support for progress * events in `r84`. For an `ImageLoader` that supports progress events, see * [this thread]{@link https://github.com/mrdoob/three.js/issues/10439#issuecomment-275785639}. * * @augments Loader */ class ImageLoader extends Loader { /** * Constructs a new image loader. * * @param {LoadingManager} [manager] - The loading manager. 
*/ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and passes the loaded image * to the `onLoad()` callback. The method also returns a new `Image` object which can * directly be used for texture creation. If you do it this way, the texture * may pop up in your scene once the respective loading process is finished. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(Image)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Unsupported in this loader. * @param {onErrorCallback} onError - Executed when errors occur. * @return {Image} The image. */ load( url, onLoad, onProgress, onError ) { if ( this.path !== undefined ) url = this.path + url; url = this.manager.resolveURL( url ); const scope = this; const cached = Cache.get( url ); if ( cached !== undefined ) { scope.manager.itemStart( url ); setTimeout( function () { if ( onLoad ) onLoad( cached ); scope.manager.itemEnd( url ); }, 0 ); return cached; } const image = createElementNS( 'img' ); function onImageLoad() { removeEventListeners(); Cache.add( url, this ); if ( onLoad ) onLoad( this ); scope.manager.itemEnd( url ); } function onImageError( event ) { removeEventListeners(); if ( onError ) onError( event ); scope.manager.itemError( url ); scope.manager.itemEnd( url ); } function removeEventListeners() { image.removeEventListener( 'load', onImageLoad, false ); image.removeEventListener( 'error', onImageError, false ); } image.addEventListener( 'load', onImageLoad, false ); image.addEventListener( 'error', onImageError, false ); if ( url.slice( 0, 5 ) !== 'data:' ) { if ( this.crossOrigin !== undefined ) image.crossOrigin = this.crossOrigin; } scope.manager.itemStart( url ); image.src = url; return image; } } /** * Class for loading cube textures. Images are internally loaded via {@link ImageLoader}. * * The loader returns an instance of {@link CubeTexture} and expects the cube map to * be defined as six separate images representing the sides of a cube. Other cube map definitions * like vertical and horizontal cross, column and row layouts are not supported. * * Note that, by convention, cube maps are specified in a coordinate system * in which positive-x is to the right when looking up the positive-z axis -- * in other words, using a left-handed coordinate system. Since three.js uses * a right-handed coordinate system, environment maps used in three.js will * have pos-x and neg-x swapped. * * The loaded cube texture is in sRGB color space. Meaning {@link Texture#colorSpace} * is set to `SRGBColorSpace` by default. * * ```js * const loader = new THREE.CubeTextureLoader().setPath( 'textures/cubeMaps/' ); * const cubeTexture = await loader.loadAsync( [ * 'px.png', 'nx.png', 'py.png', 'ny.png', 'pz.png', 'nz.png' * ] ); * scene.background = cubeTexture; * ``` * * @augments Loader */ class CubeTextureLoader extends Loader { /** * Constructs a new cube texture loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and pass the fully loaded cube texture * to the `onLoad()` callback. The method also returns a new cube texture object which can * directly be used for material creation. If you do it this way, the cube texture * may pop up in your scene once the respective loading process is finished. * * @param {Array} urls - Array of 6 URLs to images, one for each side of the * cube texture. 
The urls should be specified in the following order: pos-x, * neg-x, pos-y, neg-y, pos-z, neg-z. An array of data URIs are allowed as well. * @param {function(CubeTexture)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Unsupported in this loader. * @param {onErrorCallback} onError - Executed when errors occur. * @return {CubeTexture} The cube texture. */ load( urls, onLoad, onProgress, onError ) { const texture = new CubeTexture(); texture.colorSpace = SRGBColorSpace; const loader = new ImageLoader( this.manager ); loader.setCrossOrigin( this.crossOrigin ); loader.setPath( this.path ); let loaded = 0; function loadTexture( i ) { loader.load( urls[ i ], function ( image ) { texture.images[ i ] = image; loaded ++; if ( loaded === 6 ) { texture.needsUpdate = true; if ( onLoad ) onLoad( texture ); } }, undefined, onError ); } for ( let i = 0; i < urls.length; ++ i ) { loadTexture( i ); } return texture; } } /** * Abstract base class for loading binary texture formats RGBE, EXR or TGA. * Textures are internally loaded via {@link FileLoader}. * * Derived classes have to implement the `parse()` method which holds the parsing * for the respective format. * * @abstract * @augments Loader */ class DataTextureLoader extends Loader { /** * Constructs a new data texture loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and passes the loaded data texture * to the `onLoad()` callback. The method also returns a new texture object which can * directly be used for material creation. If you do it this way, the texture * may pop up in your scene once the respective loading process is finished. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(DataTexture)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. * @return {DataTexture} The data texture. */ load( url, onLoad, onProgress, onError ) { const scope = this; const texture = new DataTexture(); const loader = new FileLoader( this.manager ); loader.setResponseType( 'arraybuffer' ); loader.setRequestHeader( this.requestHeader ); loader.setPath( this.path ); loader.setWithCredentials( scope.withCredentials ); loader.load( url, function ( buffer ) { let texData; try { texData = scope.parse( buffer ); } catch ( error ) { if ( onError !== undefined ) { onError( error ); } else { console.error( error ); return; } } if ( texData.image !== undefined ) { texture.image = texData.image; } else if ( texData.data !== undefined ) { texture.image.width = texData.width; texture.image.height = texData.height; texture.image.data = texData.data; } texture.wrapS = texData.wrapS !== undefined ? texData.wrapS : ClampToEdgeWrapping; texture.wrapT = texData.wrapT !== undefined ? texData.wrapT : ClampToEdgeWrapping; texture.magFilter = texData.magFilter !== undefined ? texData.magFilter : LinearFilter; texture.minFilter = texData.minFilter !== undefined ? texData.minFilter : LinearFilter; texture.anisotropy = texData.anisotropy !== undefined ? 
texData.anisotropy : 1; if ( texData.colorSpace !== undefined ) { texture.colorSpace = texData.colorSpace; } if ( texData.flipY !== undefined ) { texture.flipY = texData.flipY; } if ( texData.format !== undefined ) { texture.format = texData.format; } if ( texData.type !== undefined ) { texture.type = texData.type; } if ( texData.mipmaps !== undefined ) { texture.mipmaps = texData.mipmaps; texture.minFilter = LinearMipmapLinearFilter; // presumably... } if ( texData.mipmapCount === 1 ) { texture.minFilter = LinearFilter; } if ( texData.generateMipmaps !== undefined ) { texture.generateMipmaps = texData.generateMipmaps; } texture.needsUpdate = true; if ( onLoad ) onLoad( texture, texData ); }, onProgress, onError ); return texture; } } /** * Class for loading textures. Images are internally * loaded via {@link ImageLoader}. * * ```js * const loader = new THREE.TextureLoader(); * const texture = await loader.loadAsync( 'textures/land_ocean_ice_cloud_2048.jpg' ); * * const material = new THREE.MeshBasicMaterial( { map:texture } ); * ``` * Please note that `TextureLoader` has dropped support for progress * events in `r84`. For a `TextureLoader` that supports progress events, see * [this thread]{@link https://github.com/mrdoob/three.js/issues/10439#issuecomment-293260145}. * * @augments Loader */ class TextureLoader extends Loader { /** * Constructs a new texture loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and pass the fully loaded texture * to the `onLoad()` callback. The method also returns a new texture object which can * directly be used for material creation. If you do it this way, the texture * may pop up in your scene once the respective loading process is finished. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(Texture)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Unsupported in this loader. * @param {onErrorCallback} onError - Executed when errors occur. * @return {Texture} The texture. */ load( url, onLoad, onProgress, onError ) { const texture = new Texture(); const loader = new ImageLoader( this.manager ); loader.setCrossOrigin( this.crossOrigin ); loader.setPath( this.path ); loader.load( url, function ( image ) { texture.image = image; texture.needsUpdate = true; if ( onLoad !== undefined ) { onLoad( texture ); } }, onProgress, onError ); return texture; } } /** * Abstract base class for lights - all other light types inherit the * properties and methods described here. * * @abstract * @augments Object3D */ class Light extends Object3D { /** * Constructs a new light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity. */ constructor( color, intensity = 1 ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isLight = true; this.type = 'Light'; /** * The light's color. * * @type {Color} */ this.color = new Color( color ); /** * The light's intensity. * * @type {number} * @default 1 */ this.intensity = intensity; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { // Empty here in base class; some subclasses override. 
} copy( source, recursive ) { super.copy( source, recursive ); this.color.copy( source.color ); this.intensity = source.intensity; return this; } toJSON( meta ) { const data = super.toJSON( meta ); data.object.color = this.color.getHex(); data.object.intensity = this.intensity; if ( this.groundColor !== undefined ) data.object.groundColor = this.groundColor.getHex(); if ( this.distance !== undefined ) data.object.distance = this.distance; if ( this.angle !== undefined ) data.object.angle = this.angle; if ( this.decay !== undefined ) data.object.decay = this.decay; if ( this.penumbra !== undefined ) data.object.penumbra = this.penumbra; if ( this.shadow !== undefined ) data.object.shadow = this.shadow.toJSON(); if ( this.target !== undefined ) data.object.target = this.target.uuid; return data; } } /** * A light source positioned directly above the scene, with color fading from * the sky color to the ground color. * * This light cannot be used to cast shadows. * * ```js * const light = new THREE.HemisphereLight( 0xffffbb, 0x080820, 1 ); * scene.add( light ); * ``` * * @augments Light */ class HemisphereLight extends Light { /** * Constructs a new hemisphere light. * * @param {(number|Color|string)} [skyColor=0xffffff] - The light's sky color. * @param {(number|Color|string)} [groundColor=0xffffff] - The light's ground color. * @param {number} [intensity=1] - The light's strength/intensity. */ constructor( skyColor, groundColor, intensity ) { super( skyColor, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isHemisphereLight = true; this.type = 'HemisphereLight'; this.position.copy( Object3D.DEFAULT_UP ); this.updateMatrix(); /** * The light's ground color. * * @type {Color} */ this.groundColor = new Color( groundColor ); } copy( source, recursive ) { super.copy( source, recursive ); this.groundColor.copy( source.groundColor ); return this; } } const _projScreenMatrix$1 = /*@__PURE__*/ new Matrix4(); const _lightPositionWorld$1 = /*@__PURE__*/ new Vector3(); const _lookTarget$1 = /*@__PURE__*/ new Vector3(); /** * Abstract base class for light shadow classes. These classes * represent the shadow configuration for different light types. * * @abstract */ class LightShadow { /** * Constructs a new light shadow. * * @param {Camera} camera - The light's view of the world. */ constructor( camera ) { /** * The light's view of the world. * * @type {Camera} */ this.camera = camera; /** * The intensity of the shadow. The default is `1`. * Valid values are in the range `[0, 1]`. * * @type {number} * @default 1 */ this.intensity = 1; /** * Shadow map bias, how much to add or subtract from the normalized depth * when deciding whether a surface is in shadow. * * The default is `0`. Very tiny adjustments here (in the order of `0.0001`) * may help reduce artifacts in shadows. * * @type {number} * @default 0 */ this.bias = 0; /** * Defines how much the position used to query the shadow map is offset along * the object normal. The default is `0`. Increasing this value can be used to * reduce shadow acne especially in large scenes where light shines onto * geometry at a shallow angle. The cost is that shadows may appear distorted. * * @type {number} * @default 0 */ this.normalBias = 0; /** * Setting this to values greater than 1 will blur the edges of the shadow. * High values will cause unwanted banding effects in the shadows - a greater * map size will allow for a higher value to be used here before these effects * become visible. 
* * The property has no effect when the shadow map type is `PCFSoftShadowMap` and * and it is recommended to increase softness by decreasing the shadow map size instead. * * The property has no effect when the shadow map type is `BasicShadowMap`. * * @type {number} * @default 1 */ this.radius = 1; /** * The amount of samples to use when blurring a VSM shadow map. * * @type {number} * @default 8 */ this.blurSamples = 8; /** * Defines the width and height of the shadow map. Higher values give better quality * shadows at the cost of computation time. Values must be powers of two. * * @type {Vector2} * @default (512,512) */ this.mapSize = new Vector2( 512, 512 ); /** * The depth map generated using the internal camera; a location beyond a * pixel's depth is in shadow. Computed internally during rendering. * * @type {?RenderTarget} * @default null */ this.map = null; /** * The distribution map generated using the internal camera; an occlusion is * calculated based on the distribution of depths. Computed internally during * rendering. * * @type {?RenderTarget} * @default null */ this.mapPass = null; /** * Model to shadow camera space, to compute location and depth in shadow map. * This is computed internally during rendering. * * @type {Matrix4} */ this.matrix = new Matrix4(); /** * Enables automatic updates of the light's shadow. If you do not require dynamic * lighting / shadows, you may set this to `false`. * * @type {boolean} * @default true */ this.autoUpdate = true; /** * When set to `true`, shadow maps will be updated in the next `render` call. * If you have set {@link LightShadow#autoUpdate} to `false`, you will need to * set this property to `true` and then make a render call to update the light's shadow. * * @type {boolean} * @default false */ this.needsUpdate = false; this._frustum = new Frustum(); this._frameExtents = new Vector2( 1, 1 ); this._viewportCount = 1; this._viewports = [ new Vector4( 0, 0, 1, 1 ) ]; } /** * Used internally by the renderer to get the number of viewports that need * to be rendered for this shadow. * * @return {number} The viewport count. */ getViewportCount() { return this._viewportCount; } /** * Gets the shadow cameras frustum. Used internally by the renderer to cull objects. * * @return {Frustum} The shadow camera frustum. */ getFrustum() { return this._frustum; } /** * Update the matrices for the camera and shadow, used internally by the renderer. * * @param {Light} light - The light for which the shadow is being rendered. */ updateMatrices( light ) { const shadowCamera = this.camera; const shadowMatrix = this.matrix; _lightPositionWorld$1.setFromMatrixPosition( light.matrixWorld ); shadowCamera.position.copy( _lightPositionWorld$1 ); _lookTarget$1.setFromMatrixPosition( light.target.matrixWorld ); shadowCamera.lookAt( _lookTarget$1 ); shadowCamera.updateMatrixWorld(); _projScreenMatrix$1.multiplyMatrices( shadowCamera.projectionMatrix, shadowCamera.matrixWorldInverse ); this._frustum.setFromProjectionMatrix( _projScreenMatrix$1 ); shadowMatrix.set( 0.5, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 1.0 ); shadowMatrix.multiply( _projScreenMatrix$1 ); } /** * Returns a viewport definition for the given viewport index. * * @param {number} viewportIndex - The viewport index. * @return {Vector4} The viewport. */ getViewport( viewportIndex ) { return this._viewports[ viewportIndex ]; } /** * Returns the frame extends. * * @return {Vector2} The frame extends. 
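 *
 * As a usage sketch for the shadow properties documented above (values are
 * illustrative assumptions, not recommendations):
 *
 * ```js
 * const light = new THREE.DirectionalLight( 0xffffff, 1 );
 * light.castShadow = true;
 * light.shadow.mapSize.set( 2048, 2048 );
 * light.shadow.bias = - 0.0001;
 * light.shadow.normalBias = 0.02;
 *
 * // with autoUpdate disabled, request a one-off shadow map update
 * light.shadow.autoUpdate = false;
 * light.shadow.needsUpdate = true;
 * ```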
*/ getFrameExtents() { return this._frameExtents; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { if ( this.map ) { this.map.dispose(); } if ( this.mapPass ) { this.mapPass.dispose(); } } /** * Copies the values of the given light shadow instance to this instance. * * @param {LightShadow} source - The light shadow to copy. * @return {LightShadow} A reference to this light shadow instance. */ copy( source ) { this.camera = source.camera.clone(); this.intensity = source.intensity; this.bias = source.bias; this.radius = source.radius; this.mapSize.copy( source.mapSize ); return this; } /** * Returns a new light shadow instance with copied values from this instance. * * @return {LightShadow} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Serializes the light shadow into JSON. * * @return {Object} A JSON object representing the serialized light shadow. * @see {@link ObjectLoader#parse} */ toJSON() { const object = {}; if ( this.intensity !== 1 ) object.intensity = this.intensity; if ( this.bias !== 0 ) object.bias = this.bias; if ( this.normalBias !== 0 ) object.normalBias = this.normalBias; if ( this.radius !== 1 ) object.radius = this.radius; if ( this.mapSize.x !== 512 || this.mapSize.y !== 512 ) object.mapSize = this.mapSize.toArray(); object.camera = this.camera.toJSON( false ).object; delete object.camera.matrix; return object; } } /** * Represents the shadow configuration of directional lights. * * @augments LightShadow */ class SpotLightShadow extends LightShadow { /** * Constructs a new spot light shadow. */ constructor() { super( new PerspectiveCamera( 50, 1, 0.5, 500 ) ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSpotLightShadow = true; /** * Used to focus the shadow camera. The camera's field of view is set as a * percentage of the spotlight's field-of-view. Range is `[0, 1]`. * * @type {number} * @default 1 */ this.focus = 1; } updateMatrices( light ) { const camera = this.camera; const fov = RAD2DEG * 2 * light.angle * this.focus; const aspect = this.mapSize.width / this.mapSize.height; const far = light.distance || camera.far; if ( fov !== camera.fov || aspect !== camera.aspect || far !== camera.far ) { camera.fov = fov; camera.aspect = aspect; camera.far = far; camera.updateProjectionMatrix(); } super.updateMatrices( light ); } copy( source ) { super.copy( source ); this.focus = source.focus; return this; } } /** * This light gets emitted from a single point in one direction, along a cone * that increases in size the further from the light it gets. * * This light can cast shadows - see the {@link SpotLightShadow} for details. * * ```js * // white spotlight shining from the side, modulated by a texture * const spotLight = new THREE.SpotLight( 0xffffff ); * spotLight.position.set( 100, 1000, 100 ); * spotLight.map = new THREE.TextureLoader().load( url ); * * spotLight.castShadow = true; * spotLight.shadow.mapSize.width = 1024; * spotLight.shadow.mapSize.height = 1024; * spotLight.shadow.camera.near = 500; * spotLight.shadow.camera.far = 4000; * spotLight.shadow.camera.fov = 30;s * ``` * * @augments Light */ class SpotLight extends Light { /** * Constructs a new spot light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity measured in candela (cd). 
* @param {number} [distance=0] - Maximum range of the light. `0` means no limit. * @param {number} [angle=Math.PI/3] - Maximum angle of light dispersion from its direction whose upper bound is `Math.PI/2`. * @param {number} [penumbra=0] - Percent of the spotlight cone that is attenuated due to penumbra. Value range is `[0,1]`. * @param {number} [decay=2] - The amount the light dims along the distance of the light. */ constructor( color, intensity, distance = 0, angle = Math.PI / 3, penumbra = 0, decay = 2 ) { super( color, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSpotLight = true; this.type = 'SpotLight'; this.position.copy( Object3D.DEFAULT_UP ); this.updateMatrix(); /** * The spot light points from its position to the * target's position. * * For the target's position to be changed to anything other * than the default, it must be added to the scene. * * It is also possible to set the target to be another 3D object * in the scene. The light will now track the target object. * * @type {Object3D} */ this.target = new Object3D(); /** * Maximum range of the light. `0` means no limit. * * @type {number} * @default 0 */ this.distance = distance; /** * Maximum angle of light dispersion from its direction whose upper bound is `Math.PI/2`. * * @type {number} * @default Math.PI/3 */ this.angle = angle; /** * Percent of the spotlight cone that is attenuated due to penumbra. * Value range is `[0,1]`. * * @type {number} * @default 0 */ this.penumbra = penumbra; /** * The amount the light dims along the distance of the light. In context of * physically-correct rendering the default value should not be changed. * * @type {number} * @default 2 */ this.decay = decay; /** * A texture used to modulate the color of the light. The spot light * color is mixed with the RGB value of this texture, with a ratio * corresponding to its alpha value. The cookie-like masking effect is * reproduced using pixel values (0, 0, 0, 1-cookie_value). * * *Warning*: This property is disabled if {@link Object3D#castShadow} is set to `false`. * * @type {?Texture} * @default null */ this.map = null; /** * This property holds the light's shadow configuration. * * @type {SpotLightShadow} */ this.shadow = new SpotLightShadow(); } /** * The light's power. Power is the luminous power of the light measured in lumens (lm). * Changing the power will also change the light's intensity. * * @type {number} */ get power() { // compute the light's luminous power (in lumens) from its intensity (in candela) // by convention for a spotlight, luminous power (lm) = π * luminous intensity (cd) return this.intensity * Math.PI; } set power( power ) { // set the light's intensity (in candela) from the desired luminous power (in lumens) this.intensity = power / Math.PI; } dispose() { this.shadow.dispose(); } copy( source, recursive ) { super.copy( source, recursive ); this.distance = source.distance; this.angle = source.angle; this.penumbra = source.penumbra; this.decay = source.decay; this.target = source.target.clone(); this.shadow = source.shadow.clone(); return this; } } const _projScreenMatrix = /*@__PURE__*/ new Matrix4(); const _lightPositionWorld = /*@__PURE__*/ new Vector3(); const _lookTarget = /*@__PURE__*/ new Vector3(); /** * Represents the shadow configuration of point lights. * * @augments LightShadow */ class PointLightShadow extends LightShadow { /** * Constructs a new point light shadow. 
*/ constructor() { super( new PerspectiveCamera( 90, 1, 0.5, 500 ) ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isPointLightShadow = true; this._frameExtents = new Vector2( 4, 2 ); this._viewportCount = 6; this._viewports = [ // These viewports map a cube-map onto a 2D texture with the // following orientation: // // xzXZ // y Y // // X - Positive x direction // x - Negative x direction // Y - Positive y direction // y - Negative y direction // Z - Positive z direction // z - Negative z direction // positive X new Vector4( 2, 1, 1, 1 ), // negative X new Vector4( 0, 1, 1, 1 ), // positive Z new Vector4( 3, 1, 1, 1 ), // negative Z new Vector4( 1, 1, 1, 1 ), // positive Y new Vector4( 3, 0, 1, 1 ), // negative Y new Vector4( 1, 0, 1, 1 ) ]; this._cubeDirections = [ new Vector3( 1, 0, 0 ), new Vector3( -1, 0, 0 ), new Vector3( 0, 0, 1 ), new Vector3( 0, 0, -1 ), new Vector3( 0, 1, 0 ), new Vector3( 0, -1, 0 ) ]; this._cubeUps = [ new Vector3( 0, 1, 0 ), new Vector3( 0, 1, 0 ), new Vector3( 0, 1, 0 ), new Vector3( 0, 1, 0 ), new Vector3( 0, 0, 1 ), new Vector3( 0, 0, -1 ) ]; } /** * Update the matrices for the camera and shadow, used internally by the renderer. * * @param {Light} light - The light for which the shadow is being rendered. * @param {number} [viewportIndex=0] - The viewport index. */ updateMatrices( light, viewportIndex = 0 ) { const camera = this.camera; const shadowMatrix = this.matrix; const far = light.distance || camera.far; if ( far !== camera.far ) { camera.far = far; camera.updateProjectionMatrix(); } _lightPositionWorld.setFromMatrixPosition( light.matrixWorld ); camera.position.copy( _lightPositionWorld ); _lookTarget.copy( camera.position ); _lookTarget.add( this._cubeDirections[ viewportIndex ] ); camera.up.copy( this._cubeUps[ viewportIndex ] ); camera.lookAt( _lookTarget ); camera.updateMatrixWorld(); shadowMatrix.makeTranslation( - _lightPositionWorld.x, - _lightPositionWorld.y, - _lightPositionWorld.z ); _projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse ); this._frustum.setFromProjectionMatrix( _projScreenMatrix ); } } /** * A light that gets emitted from a single point in all directions. A common * use case for this is to replicate the light emitted from a bare * lightbulb. * * This light can cast shadows - see the {@link PointLightShadow} for details. * * ```js * const light = new THREE.PointLight( 0xff0000, 1, 100 ); * light.position.set( 50, 50, 50 ); * scene.add( light ); * ``` * * @augments Light */ class PointLight extends Light { /** * Constructs a new point light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity measured in candela (cd). * @param {number} [distance=0] - Maximum range of the light. `0` means no limit. * @param {number} [decay=2] - The amount the light dims along the distance of the light. */ constructor( color, intensity, distance = 0, decay = 2 ) { super( color, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isPointLight = true; this.type = 'PointLight'; /** * When distance is zero, light will attenuate according to inverse-square * law to infinite distance. When distance is non-zero, light will attenuate * according to inverse-square law until near the distance cutoff, where it * will then attenuate quickly and smoothly to 0. Inherently, cutoffs are not * physically correct. 
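 *
 * For example (illustrative value): a cutoff of `50` keeps the inverse-square
 * falloff but fades the light to zero near 50 world units.
 *
 * ```js
 * const lamp = new THREE.PointLight( 0xffffff, 1 );
 * lamp.distance = 50; // 0 (the default) would mean no cutoff
 * ```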
* * @type {number} * @default 0 */ this.distance = distance; /** * The amount the light dims along the distance of the light. In context of * physically-correct rendering the default value should not be changed. * * @type {number} * @default 2 */ this.decay = decay; /** * This property holds the light's shadow configuration. * * @type {PointLightShadow} */ this.shadow = new PointLightShadow(); } /** * The light's power. Power is the luminous power of the light measured in lumens (lm). * Changing the power will also change the light's intensity. * * @type {number} */ get power() { // compute the light's luminous power (in lumens) from its intensity (in candela) // for an isotropic light source, luminous power (lm) = 4 π luminous intensity (cd) return this.intensity * 4 * Math.PI; } set power( power ) { // set the light's intensity (in candela) from the desired luminous power (in lumens) this.intensity = power / ( 4 * Math.PI ); } dispose() { this.shadow.dispose(); } copy( source, recursive ) { super.copy( source, recursive ); this.distance = source.distance; this.decay = source.decay; this.shadow = source.shadow.clone(); return this; } } /** * Camera that uses [orthographic projection]{@link https://en.wikipedia.org/wiki/Orthographic_projection}. * * In this projection mode, an object's size in the rendered image stays * constant regardless of its distance from the camera. This can be useful * for rendering 2D scenes and UI elements, amongst other things. * * ```js * const camera = new THREE.OrthographicCamera( width / - 2, width / 2, height / 2, height / - 2, 1, 1000 ); * scene.add( camera ); * ``` * * @augments Camera */ class OrthographicCamera extends Camera { /** * Constructs a new orthographic camera. * * @param {number} [left=-1] - The left plane of the camera's frustum. * @param {number} [right=1] - The right plane of the camera's frustum. * @param {number} [top=1] - The top plane of the camera's frustum. * @param {number} [bottom=-1] - The bottom plane of the camera's frustum. * @param {number} [near=0.1] - The camera's near plane. * @param {number} [far=2000] - The camera's far plane. */ constructor( left = -1, right = 1, top = 1, bottom = -1, near = 0.1, far = 2000 ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isOrthographicCamera = true; this.type = 'OrthographicCamera'; /** * The zoom factor of the camera. * * @type {number} * @default 1 */ this.zoom = 1; /** * Represents the frustum window specification. This property should not be edited * directly but via {@link PerspectiveCamera#setViewOffset} and {@link PerspectiveCamera#clearViewOffset}. * * @type {?Object} * @default null */ this.view = null; /** * The left plane of the camera's frustum. * * @type {number} * @default -1 */ this.left = left; /** * The right plane of the camera's frustum. * * @type {number} * @default 1 */ this.right = right; /** * The top plane of the camera's frustum. * * @type {number} * @default 1 */ this.top = top; /** * The bottom plane of the camera's frustum. * * @type {number} * @default -1 */ this.bottom = bottom; /** * The camera's near plane. The valid range is greater than `0` * and less than the current value of {@link OrthographicCamera#far}. * * Note that, unlike for the {@link PerspectiveCamera}, `0` is a * valid value for an orthographic camera's near plane. * * @type {number} * @default 0.1 */ this.near = near; /** * The camera's far plane. Must be greater than the * current value of {@link OrthographicCamera#near}. 
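 *
 * When changing `near` or `far` at runtime, the projection matrix has to be
 * refreshed, for example:
 *
 * ```js
 * camera.far = 5000;
 * camera.updateProjectionMatrix();
 * ```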
* * @type {number} * @default 2000 */ this.far = far; this.updateProjectionMatrix(); } copy( source, recursive ) { super.copy( source, recursive ); this.left = source.left; this.right = source.right; this.top = source.top; this.bottom = source.bottom; this.near = source.near; this.far = source.far; this.zoom = source.zoom; this.view = source.view === null ? null : Object.assign( {}, source.view ); return this; } /** * Sets an offset in a larger frustum. This is useful for multi-window or * multi-monitor/multi-machine setups. * * @param {number} fullWidth - The full width of multiview setup. * @param {number} fullHeight - The full height of multiview setup. * @param {number} x - The horizontal offset of the subcamera. * @param {number} y - The vertical offset of the subcamera. * @param {number} width - The width of subcamera. * @param {number} height - The height of subcamera. * @see {@link PerspectiveCamera#setViewOffset} */ setViewOffset( fullWidth, fullHeight, x, y, width, height ) { if ( this.view === null ) { this.view = { enabled: true, fullWidth: 1, fullHeight: 1, offsetX: 0, offsetY: 0, width: 1, height: 1 }; } this.view.enabled = true; this.view.fullWidth = fullWidth; this.view.fullHeight = fullHeight; this.view.offsetX = x; this.view.offsetY = y; this.view.width = width; this.view.height = height; this.updateProjectionMatrix(); } /** * Removes the view offset from the projection matrix. */ clearViewOffset() { if ( this.view !== null ) { this.view.enabled = false; } this.updateProjectionMatrix(); } /** * Updates the camera's projection matrix. Must be called after any change of * camera properties. */ updateProjectionMatrix() { const dx = ( this.right - this.left ) / ( 2 * this.zoom ); const dy = ( this.top - this.bottom ) / ( 2 * this.zoom ); const cx = ( this.right + this.left ) / 2; const cy = ( this.top + this.bottom ) / 2; let left = cx - dx; let right = cx + dx; let top = cy + dy; let bottom = cy - dy; if ( this.view !== null && this.view.enabled ) { const scaleW = ( this.right - this.left ) / this.view.fullWidth / this.zoom; const scaleH = ( this.top - this.bottom ) / this.view.fullHeight / this.zoom; left += scaleW * this.view.offsetX; right = left + scaleW * this.view.width; top -= scaleH * this.view.offsetY; bottom = top - scaleH * this.view.height; } this.projectionMatrix.makeOrthographic( left, right, top, bottom, this.near, this.far, this.coordinateSystem ); this.projectionMatrixInverse.copy( this.projectionMatrix ).invert(); } toJSON( meta ) { const data = super.toJSON( meta ); data.object.zoom = this.zoom; data.object.left = this.left; data.object.right = this.right; data.object.top = this.top; data.object.bottom = this.bottom; data.object.near = this.near; data.object.far = this.far; if ( this.view !== null ) data.object.view = Object.assign( {}, this.view ); return data; } } /** * Represents the shadow configuration of directional lights. * * @augments LightShadow */ class DirectionalLightShadow extends LightShadow { /** * Constructs a new directional light shadow. */ constructor() { super( new OrthographicCamera( -5, 5, 5, -5, 0.5, 500 ) ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isDirectionalLightShadow = true; } } /** * A light that gets emitted in a specific direction. This light will behave * as though it is infinitely far away and the rays produced from it are all * parallel. 
The common use case for this is to simulate daylight; the sun is * far enough away that its position can be considered to be infinite, and * all light rays coming from it are parallel. * * A common point of confusion for directional lights is that setting the * rotation has no effect. This is because three.js's DirectionalLight is the * equivalent to what is often called a 'Target Direct Light' in other * applications. * * This means that its direction is calculated as pointing from the light's * {@link Object3D#position} to the {@link DirectionalLight#target} position * (as opposed to a 'Free Direct Light' that just has a rotation * component). * * This light can cast shadows - see the {@link DirectionalLightShadow} for details. * * ```js * // White directional light at half intensity shining from the top. * const directionalLight = new THREE.DirectionalLight( 0xffffff, 0.5 ); * scene.add( directionalLight ); * ``` * * @augments Light */ class DirectionalLight extends Light { /** * Constructs a new directional light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity. */ constructor( color, intensity ) { super( color, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isDirectionalLight = true; this.type = 'DirectionalLight'; this.position.copy( Object3D.DEFAULT_UP ); this.updateMatrix(); /** * The directional light points from its position to the * target's position. * * For the target's position to be changed to anything other * than the default, it must be added to the scene. * * It is also possible to set the target to be another 3D object * in the scene. The light will now track the target object. * * @type {Object3D} */ this.target = new Object3D(); /** * This property holds the light's shadow configuration. * * @type {DirectionalLightShadow} */ this.shadow = new DirectionalLightShadow(); } dispose() { this.shadow.dispose(); } copy( source ) { super.copy( source ); this.target = source.target.clone(); this.shadow = source.shadow.clone(); return this; } } /** * This light globally illuminates all objects in the scene equally. * * It cannot be used to cast shadows as it does not have a direction. * * ```js * const light = new THREE.AmbientLight( 0x404040 ); // soft white light * scene.add( light ); * ``` * * @augments Light */ class AmbientLight extends Light { /** * Constructs a new ambient light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity. */ constructor( color, intensity ) { super( color, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isAmbientLight = true; this.type = 'AmbientLight'; } } /** * This class emits light uniformly across the face a rectangular plane. * This light type can be used to simulate light sources such as bright * windows or strip lighting. * * Important Notes: * * - There is no shadow support. * - Only PBR materials are supported. * - You have to include `RectAreaLightUniformsLib` (`WebGLRenderer`) or `RectAreaLightTexturesLib` (`WebGPURenderer`) * into your app and init the uniforms/textures. 
* * ```js * RectAreaLightUniformsLib.init(); // only relevant for WebGLRenderer * THREE.RectAreaLightNode.setLTC( RectAreaLightTexturesLib.init() ); // only relevant for WebGPURenderer * * const intensity = 1; const width = 10; const height = 10; * const rectLight = new THREE.RectAreaLight( 0xffffff, intensity, width, height ); * rectLight.position.set( 5, 5, 0 ); * rectLight.lookAt( 0, 0, 0 ); * scene.add( rectLight ) * ``` * * @augments Light */ class RectAreaLight extends Light { /** * Constructs a new area light. * * @param {(number|Color|string)} [color=0xffffff] - The light's color. * @param {number} [intensity=1] - The light's strength/intensity. * @param {number} [width=10] - The width of the light. * @param {number} [height=10] - The height of the light. */ constructor( color, intensity, width = 10, height = 10 ) { super( color, intensity ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isRectAreaLight = true; this.type = 'RectAreaLight'; /** * The width of the light. * * @type {number} * @default 10 */ this.width = width; /** * The height of the light. * * @type {number} * @default 10 */ this.height = height; } /** * The light's power. Power is the luminous power of the light measured in lumens (lm). * Changing the power will also change the light's intensity. * * @type {number} */ get power() { // compute the light's luminous power (in lumens) from its intensity (in nits) return this.intensity * this.width * this.height * Math.PI; } set power( power ) { // set the light's intensity (in nits) from the desired luminous power (in lumens) this.intensity = power / ( this.width * this.height * Math.PI ); } copy( source ) { super.copy( source ); this.width = source.width; this.height = source.height; return this; } toJSON( meta ) { const data = super.toJSON( meta ); data.object.width = this.width; data.object.height = this.height; return data; } } /** * Represents a third-order spherical harmonics (SH). Light probes use this class * to encode lighting information. * * - Primary reference: {@link https://graphics.stanford.edu/papers/envmap/envmap.pdf} * - Secondary reference: {@link https://www.ppsloan.org/publications/StupidSH36.pdf} */ class SphericalHarmonics3 { /** * Constructs a new spherical harmonics. */ constructor() { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSphericalHarmonics3 = true; /** * An array holding the (9) SH coefficients. * * @type {Array} */ this.coefficients = []; for ( let i = 0; i < 9; i ++ ) { this.coefficients.push( new Vector3() ); } } /** * Sets the given SH coefficients to this instance by copying * the values. * * @param {Array} coefficients - The SH coefficients. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ set( coefficients ) { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].copy( coefficients[ i ] ); } return this; } /** * Sets all SH coefficients to `0`. * * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ zero() { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].set( 0, 0, 0 ); } return this; } /** * Returns the radiance in the direction of the given normal. * * @param {Vector3} normal - The normal vector (assumed to be unit length) * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The radiance. 
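 *
 * A minimal usage sketch:
 *
 * ```js
 * const sh = new THREE.SphericalHarmonics3();
 * const radiance = sh.getAt( new THREE.Vector3( 0, 1, 0 ), new THREE.Vector3() );
 * ```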
*/ getAt( normal, target ) { // normal is assumed to be unit length const x = normal.x, y = normal.y, z = normal.z; const coeff = this.coefficients; // band 0 target.copy( coeff[ 0 ] ).multiplyScalar( 0.282095 ); // band 1 target.addScaledVector( coeff[ 1 ], 0.488603 * y ); target.addScaledVector( coeff[ 2 ], 0.488603 * z ); target.addScaledVector( coeff[ 3 ], 0.488603 * x ); // band 2 target.addScaledVector( coeff[ 4 ], 1.092548 * ( x * y ) ); target.addScaledVector( coeff[ 5 ], 1.092548 * ( y * z ) ); target.addScaledVector( coeff[ 6 ], 0.315392 * ( 3.0 * z * z - 1.0 ) ); target.addScaledVector( coeff[ 7 ], 1.092548 * ( x * z ) ); target.addScaledVector( coeff[ 8 ], 0.546274 * ( x * x - y * y ) ); return target; } /** * Returns the irradiance (radiance convolved with cosine lobe) in the * direction of the given normal. * * @param {Vector3} normal - The normal vector (assumed to be unit length) * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The irradiance. */ getIrradianceAt( normal, target ) { // normal is assumed to be unit length const x = normal.x, y = normal.y, z = normal.z; const coeff = this.coefficients; // band 0 target.copy( coeff[ 0 ] ).multiplyScalar( 0.886227 ); // π * 0.282095 // band 1 target.addScaledVector( coeff[ 1 ], 2.0 * 0.511664 * y ); // ( 2 * π / 3 ) * 0.488603 target.addScaledVector( coeff[ 2 ], 2.0 * 0.511664 * z ); target.addScaledVector( coeff[ 3 ], 2.0 * 0.511664 * x ); // band 2 target.addScaledVector( coeff[ 4 ], 2.0 * 0.429043 * x * y ); // ( π / 4 ) * 1.092548 target.addScaledVector( coeff[ 5 ], 2.0 * 0.429043 * y * z ); target.addScaledVector( coeff[ 6 ], 0.743125 * z * z - 0.247708 ); // ( π / 4 ) * 0.315392 * 3 target.addScaledVector( coeff[ 7 ], 2.0 * 0.429043 * x * z ); target.addScaledVector( coeff[ 8 ], 0.429043 * ( x * x - y * y ) ); // ( π / 4 ) * 0.546274 return target; } /** * Adds the given SH to this instance. * * @param {SphericalHarmonics3} sh - The SH to add. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ add( sh ) { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].add( sh.coefficients[ i ] ); } return this; } /** * A convenience method for performing {@link SphericalHarmonics3#add} and * {@link SphericalHarmonics3#scale} at once. * * @param {SphericalHarmonics3} sh - The SH to add. * @param {number} s - The scale factor. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ addScaledSH( sh, s ) { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].addScaledVector( sh.coefficients[ i ], s ); } return this; } /** * Scales this SH by the given scale factor. * * @param {number} s - The scale factor. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ scale( s ) { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].multiplyScalar( s ); } return this; } /** * Linearly interpolates between the given SH and this instance by the given * alpha factor. * * @param {SphericalHarmonics3} sh - The SH to interpolate with. * @param {number} alpha - The alpha factor. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ lerp( sh, alpha ) { for ( let i = 0; i < 9; i ++ ) { this.coefficients[ i ].lerp( sh.coefficients[ i ], alpha ); } return this; } /** * Returns `true` if this spherical harmonics is equal to the given one. * * @param {SphericalHarmonics3} sh - The spherical harmonics to test for equality. * @return {boolean} Whether this spherical harmonics is equal to the given one.
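*
* For example (illustrative only):
*
* ```js
* const a = new THREE.SphericalHarmonics3();
* const b = a.clone();
* console.log( a.equals( b ) ); // true
* ```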
*/ equals( sh ) { for ( let i = 0; i < 9; i ++ ) { if ( ! this.coefficients[ i ].equals( sh.coefficients[ i ] ) ) { return false; } } return true; } /** * Copies the values of the given spherical harmonics to this instance. * * @param {SphericalHarmonics3} sh - The spherical harmonics to copy. * @return {SphericalHarmonics3} A reference to this spherical harmonics. */ copy( sh ) { return this.set( sh.coefficients ); } /** * Returns a new spherical harmonics with copied values from this instance. * * @return {SphericalHarmonics3} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Sets the SH coefficients of this instance from the given array. * * @param {Array} array - An array holding the SH coefficients. * @param {number} [offset=0] - The array offset where to start copying. * @return {SphericalHarmonics3} A clone of this instance. */ fromArray( array, offset = 0 ) { const coefficients = this.coefficients; for ( let i = 0; i < 9; i ++ ) { coefficients[ i ].fromArray( array, offset + ( i * 3 ) ); } return this; } /** * Returns an array with the SH coefficients, or copies them into the provided * array. The coefficients are represented as numbers. * * @param {Array} [array=[]] - The target array. * @param {number} [offset=0] - The array offset where to start copying. * @return {Array} An array with flat SH coefficients. */ toArray( array = [], offset = 0 ) { const coefficients = this.coefficients; for ( let i = 0; i < 9; i ++ ) { coefficients[ i ].toArray( array, offset + ( i * 3 ) ); } return array; } /** * Computes the SH basis for the given normal vector. * * @param {Vector3} normal - The normal. * @param {Array} shBasis - The target array holding the SH basis. */ static getBasisAt( normal, shBasis ) { // normal is assumed to be unit length const x = normal.x, y = normal.y, z = normal.z; // band 0 shBasis[ 0 ] = 0.282095; // band 1 shBasis[ 1 ] = 0.488603 * y; shBasis[ 2 ] = 0.488603 * z; shBasis[ 3 ] = 0.488603 * x; // band 2 shBasis[ 4 ] = 1.092548 * x * y; shBasis[ 5 ] = 1.092548 * y * z; shBasis[ 6 ] = 0.315392 * ( 3 * z * z - 1 ); shBasis[ 7 ] = 1.092548 * x * z; shBasis[ 8 ] = 0.546274 * ( x * x - y * y ); } } /** * Light probes are an alternative way of adding light to a 3D scene. Unlike * classical light sources (e.g. directional, point or spot lights), light * probes do not emit light. Instead they store information about light * passing through 3D space. During rendering, the light that hits a 3D * object is approximated by using the data from the light probe. * * Light probes are usually created from (radiance) environment maps. The * class {@link LightProbeGenerator} can be used to create light probes from * cube textures or render targets. However, light estimation data could also * be provided in other forms e.g. by WebXR. This enables the rendering of * augmented reality content that reacts to real world lighting. * * The current probe implementation in three.js supports so-called diffuse * light probes. This type of light probe is functionally equivalent to an * irradiance environment map. * * @augments Light */ class LightProbe extends Light { /** * Constructs a new light probe. * * @param {SphericalHarmonics3} sh - The spherical harmonics which represents encoded lighting information. * @param {number} [intensity=1] - The light's strength/intensity. */ constructor( sh = new SphericalHarmonics3(), intensity = 1 ) { super( undefined, intensity ); /** * This flag can be used for type testing. 
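*
* For example, where `light` is any object to be tested (an illustrative name, not part of the API):
*
* ```js
* if ( light.isLightProbe === true ) {
* 	// duck-type check used instead of `instanceof` throughout three.js
* }
* ```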
* * @type {boolean} * @readonly * @default true */ this.isLightProbe = true; /** * A light probe uses spherical harmonics to encode lighting information. * * @type {SphericalHarmonics3} */ this.sh = sh; } copy( source ) { super.copy( source ); this.sh.copy( source.sh ); return this; } /** * Deserializes the light probe from the given JSON. * * @param {Object} json - The JSON holding the serialized light probe. * @return {LightProbe} A reference to this light probe. */ fromJSON( json ) { this.intensity = json.intensity; // TODO: Move this bit to Light.fromJSON(); this.sh.fromArray( json.sh ); return this; } toJSON( meta ) { const data = super.toJSON( meta ); data.object.sh = this.sh.toArray(); return data; } } /** * Class for loading materials. The files are internally * loaded via {@link FileLoader}. * * ```js * const loader = new THREE.MaterialLoader(); * const material = await loader.loadAsync( 'material.json' ); * ``` * This loader does not support node materials. Use {@link NodeMaterialLoader} instead. * * @augments Loader */ class MaterialLoader extends Loader { /** * Constructs a new material loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); /** * A dictionary holding textures used by the material. * * @type {Object} */ this.textures = {}; } /** * Starts loading from the given URL and passes the loaded material to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(Material)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. */ load( url, onLoad, onProgress, onError ) { const scope = this; const loader = new FileLoader( scope.manager ); loader.setPath( scope.path ); loader.setRequestHeader( scope.requestHeader ); loader.setWithCredentials( scope.withCredentials ); loader.load( url, function ( text ) { try { onLoad( scope.parse( JSON.parse( text ) ) ); } catch ( e ) { if ( onError ) { onError( e ); } else { console.error( e ); } scope.manager.itemError( url ); } }, onProgress, onError ); } /** * Parses the given JSON object and returns a material. * * @param {Object} json - The serialized material. * @return {Material} The parsed material.
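*
* A minimal parsing sketch; `materialJSON` is an illustrative name for JSON produced by `Material#toJSON()`, and any textures it references must be injected via `setTextures()` beforehand:
*
* ```js
* const loader = new THREE.MaterialLoader();
* const material = loader.parse( materialJSON );
* ```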
*/ parse( json ) { const textures = this.textures; function getTexture( name ) { if ( textures[ name ] === undefined ) { console.warn( 'THREE.MaterialLoader: Undefined texture', name ); } return textures[ name ]; } const material = this.createMaterialFromType( json.type ); if ( json.uuid !== undefined ) material.uuid = json.uuid; if ( json.name !== undefined ) material.name = json.name; if ( json.color !== undefined && material.color !== undefined ) material.color.setHex( json.color ); if ( json.roughness !== undefined ) material.roughness = json.roughness; if ( json.metalness !== undefined ) material.metalness = json.metalness; if ( json.sheen !== undefined ) material.sheen = json.sheen; if ( json.sheenColor !== undefined ) material.sheenColor = new Color().setHex( json.sheenColor ); if ( json.sheenRoughness !== undefined ) material.sheenRoughness = json.sheenRoughness; if ( json.emissive !== undefined && material.emissive !== undefined ) material.emissive.setHex( json.emissive ); if ( json.specular !== undefined && material.specular !== undefined ) material.specular.setHex( json.specular ); if ( json.specularIntensity !== undefined ) material.specularIntensity = json.specularIntensity; if ( json.specularColor !== undefined && material.specularColor !== undefined ) material.specularColor.setHex( json.specularColor ); if ( json.shininess !== undefined ) material.shininess = json.shininess; if ( json.clearcoat !== undefined ) material.clearcoat = json.clearcoat; if ( json.clearcoatRoughness !== undefined ) material.clearcoatRoughness = json.clearcoatRoughness; if ( json.dispersion !== undefined ) material.dispersion = json.dispersion; if ( json.iridescence !== undefined ) material.iridescence = json.iridescence; if ( json.iridescenceIOR !== undefined ) material.iridescenceIOR = json.iridescenceIOR; if ( json.iridescenceThicknessRange !== undefined ) material.iridescenceThicknessRange = json.iridescenceThicknessRange; if ( json.transmission !== undefined ) material.transmission = json.transmission; if ( json.thickness !== undefined ) material.thickness = json.thickness; if ( json.attenuationDistance !== undefined ) material.attenuationDistance = json.attenuationDistance; if ( json.attenuationColor !== undefined && material.attenuationColor !== undefined ) material.attenuationColor.setHex( json.attenuationColor ); if ( json.anisotropy !== undefined ) material.anisotropy = json.anisotropy; if ( json.anisotropyRotation !== undefined ) material.anisotropyRotation = json.anisotropyRotation; if ( json.fog !== undefined ) material.fog = json.fog; if ( json.flatShading !== undefined ) material.flatShading = json.flatShading; if ( json.blending !== undefined ) material.blending = json.blending; if ( json.combine !== undefined ) material.combine = json.combine; if ( json.side !== undefined ) material.side = json.side; if ( json.shadowSide !== undefined ) material.shadowSide = json.shadowSide; if ( json.opacity !== undefined ) material.opacity = json.opacity; if ( json.transparent !== undefined ) material.transparent = json.transparent; if ( json.alphaTest !== undefined ) material.alphaTest = json.alphaTest; if ( json.alphaHash !== undefined ) material.alphaHash = json.alphaHash; if ( json.depthFunc !== undefined ) material.depthFunc = json.depthFunc; if ( json.depthTest !== undefined ) material.depthTest = json.depthTest; if ( json.depthWrite !== undefined ) material.depthWrite = json.depthWrite; if ( json.colorWrite !== undefined ) material.colorWrite = json.colorWrite; if ( json.blendSrc !== 
undefined ) material.blendSrc = json.blendSrc; if ( json.blendDst !== undefined ) material.blendDst = json.blendDst; if ( json.blendEquation !== undefined ) material.blendEquation = json.blendEquation; if ( json.blendSrcAlpha !== undefined ) material.blendSrcAlpha = json.blendSrcAlpha; if ( json.blendDstAlpha !== undefined ) material.blendDstAlpha = json.blendDstAlpha; if ( json.blendEquationAlpha !== undefined ) material.blendEquationAlpha = json.blendEquationAlpha; if ( json.blendColor !== undefined && material.blendColor !== undefined ) material.blendColor.setHex( json.blendColor ); if ( json.blendAlpha !== undefined ) material.blendAlpha = json.blendAlpha; if ( json.stencilWriteMask !== undefined ) material.stencilWriteMask = json.stencilWriteMask; if ( json.stencilFunc !== undefined ) material.stencilFunc = json.stencilFunc; if ( json.stencilRef !== undefined ) material.stencilRef = json.stencilRef; if ( json.stencilFuncMask !== undefined ) material.stencilFuncMask = json.stencilFuncMask; if ( json.stencilFail !== undefined ) material.stencilFail = json.stencilFail; if ( json.stencilZFail !== undefined ) material.stencilZFail = json.stencilZFail; if ( json.stencilZPass !== undefined ) material.stencilZPass = json.stencilZPass; if ( json.stencilWrite !== undefined ) material.stencilWrite = json.stencilWrite; if ( json.wireframe !== undefined ) material.wireframe = json.wireframe; if ( json.wireframeLinewidth !== undefined ) material.wireframeLinewidth = json.wireframeLinewidth; if ( json.wireframeLinecap !== undefined ) material.wireframeLinecap = json.wireframeLinecap; if ( json.wireframeLinejoin !== undefined ) material.wireframeLinejoin = json.wireframeLinejoin; if ( json.rotation !== undefined ) material.rotation = json.rotation; if ( json.linewidth !== undefined ) material.linewidth = json.linewidth; if ( json.dashSize !== undefined ) material.dashSize = json.dashSize; if ( json.gapSize !== undefined ) material.gapSize = json.gapSize; if ( json.scale !== undefined ) material.scale = json.scale; if ( json.polygonOffset !== undefined ) material.polygonOffset = json.polygonOffset; if ( json.polygonOffsetFactor !== undefined ) material.polygonOffsetFactor = json.polygonOffsetFactor; if ( json.polygonOffsetUnits !== undefined ) material.polygonOffsetUnits = json.polygonOffsetUnits; if ( json.dithering !== undefined ) material.dithering = json.dithering; if ( json.alphaToCoverage !== undefined ) material.alphaToCoverage = json.alphaToCoverage; if ( json.premultipliedAlpha !== undefined ) material.premultipliedAlpha = json.premultipliedAlpha; if ( json.forceSinglePass !== undefined ) material.forceSinglePass = json.forceSinglePass; if ( json.visible !== undefined ) material.visible = json.visible; if ( json.toneMapped !== undefined ) material.toneMapped = json.toneMapped; if ( json.userData !== undefined ) material.userData = json.userData; if ( json.vertexColors !== undefined ) { if ( typeof json.vertexColors === 'number' ) { material.vertexColors = ( json.vertexColors > 0 ) ? 
true : false; } else { material.vertexColors = json.vertexColors; } } // Shader Material if ( json.uniforms !== undefined ) { for ( const name in json.uniforms ) { const uniform = json.uniforms[ name ]; material.uniforms[ name ] = {}; switch ( uniform.type ) { case 't': material.uniforms[ name ].value = getTexture( uniform.value ); break; case 'c': material.uniforms[ name ].value = new Color().setHex( uniform.value ); break; case 'v2': material.uniforms[ name ].value = new Vector2().fromArray( uniform.value ); break; case 'v3': material.uniforms[ name ].value = new Vector3().fromArray( uniform.value ); break; case 'v4': material.uniforms[ name ].value = new Vector4().fromArray( uniform.value ); break; case 'm3': material.uniforms[ name ].value = new Matrix3().fromArray( uniform.value ); break; case 'm4': material.uniforms[ name ].value = new Matrix4().fromArray( uniform.value ); break; default: material.uniforms[ name ].value = uniform.value; } } } if ( json.defines !== undefined ) material.defines = json.defines; if ( json.vertexShader !== undefined ) material.vertexShader = json.vertexShader; if ( json.fragmentShader !== undefined ) material.fragmentShader = json.fragmentShader; if ( json.glslVersion !== undefined ) material.glslVersion = json.glslVersion; if ( json.extensions !== undefined ) { for ( const key in json.extensions ) { material.extensions[ key ] = json.extensions[ key ]; } } if ( json.lights !== undefined ) material.lights = json.lights; if ( json.clipping !== undefined ) material.clipping = json.clipping; // for PointsMaterial if ( json.size !== undefined ) material.size = json.size; if ( json.sizeAttenuation !== undefined ) material.sizeAttenuation = json.sizeAttenuation; // maps if ( json.map !== undefined ) material.map = getTexture( json.map ); if ( json.matcap !== undefined ) material.matcap = getTexture( json.matcap ); if ( json.alphaMap !== undefined ) material.alphaMap = getTexture( json.alphaMap ); if ( json.bumpMap !== undefined ) material.bumpMap = getTexture( json.bumpMap ); if ( json.bumpScale !== undefined ) material.bumpScale = json.bumpScale; if ( json.normalMap !== undefined ) material.normalMap = getTexture( json.normalMap ); if ( json.normalMapType !== undefined ) material.normalMapType = json.normalMapType; if ( json.normalScale !== undefined ) { let normalScale = json.normalScale; if ( Array.isArray( normalScale ) === false ) { // Blender exporter used to export a scalar. 
See #7459 normalScale = [ normalScale, normalScale ]; } material.normalScale = new Vector2().fromArray( normalScale ); } if ( json.displacementMap !== undefined ) material.displacementMap = getTexture( json.displacementMap ); if ( json.displacementScale !== undefined ) material.displacementScale = json.displacementScale; if ( json.displacementBias !== undefined ) material.displacementBias = json.displacementBias; if ( json.roughnessMap !== undefined ) material.roughnessMap = getTexture( json.roughnessMap ); if ( json.metalnessMap !== undefined ) material.metalnessMap = getTexture( json.metalnessMap ); if ( json.emissiveMap !== undefined ) material.emissiveMap = getTexture( json.emissiveMap ); if ( json.emissiveIntensity !== undefined ) material.emissiveIntensity = json.emissiveIntensity; if ( json.specularMap !== undefined ) material.specularMap = getTexture( json.specularMap ); if ( json.specularIntensityMap !== undefined ) material.specularIntensityMap = getTexture( json.specularIntensityMap ); if ( json.specularColorMap !== undefined ) material.specularColorMap = getTexture( json.specularColorMap ); if ( json.envMap !== undefined ) material.envMap = getTexture( json.envMap ); if ( json.envMapRotation !== undefined ) material.envMapRotation.fromArray( json.envMapRotation ); if ( json.envMapIntensity !== undefined ) material.envMapIntensity = json.envMapIntensity; if ( json.reflectivity !== undefined ) material.reflectivity = json.reflectivity; if ( json.refractionRatio !== undefined ) material.refractionRatio = json.refractionRatio; if ( json.lightMap !== undefined ) material.lightMap = getTexture( json.lightMap ); if ( json.lightMapIntensity !== undefined ) material.lightMapIntensity = json.lightMapIntensity; if ( json.aoMap !== undefined ) material.aoMap = getTexture( json.aoMap ); if ( json.aoMapIntensity !== undefined ) material.aoMapIntensity = json.aoMapIntensity; if ( json.gradientMap !== undefined ) material.gradientMap = getTexture( json.gradientMap ); if ( json.clearcoatMap !== undefined ) material.clearcoatMap = getTexture( json.clearcoatMap ); if ( json.clearcoatRoughnessMap !== undefined ) material.clearcoatRoughnessMap = getTexture( json.clearcoatRoughnessMap ); if ( json.clearcoatNormalMap !== undefined ) material.clearcoatNormalMap = getTexture( json.clearcoatNormalMap ); if ( json.clearcoatNormalScale !== undefined ) material.clearcoatNormalScale = new Vector2().fromArray( json.clearcoatNormalScale ); if ( json.iridescenceMap !== undefined ) material.iridescenceMap = getTexture( json.iridescenceMap ); if ( json.iridescenceThicknessMap !== undefined ) material.iridescenceThicknessMap = getTexture( json.iridescenceThicknessMap ); if ( json.transmissionMap !== undefined ) material.transmissionMap = getTexture( json.transmissionMap ); if ( json.thicknessMap !== undefined ) material.thicknessMap = getTexture( json.thicknessMap ); if ( json.anisotropyMap !== undefined ) material.anisotropyMap = getTexture( json.anisotropyMap ); if ( json.sheenColorMap !== undefined ) material.sheenColorMap = getTexture( json.sheenColorMap ); if ( json.sheenRoughnessMap !== undefined ) material.sheenRoughnessMap = getTexture( json.sheenRoughnessMap ); return material; } /** * Textures are not embedded in the material JSON so they have * to be injected before the loading process starts. * * @param {Object} value - A dictionary holding textures for material properties. * @return {MaterialLoader} A reference to this material loader. 
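*
* A minimal sketch; `myTexture` is an illustrative name, and the dictionary keys must match the texture UUIDs referenced by the material JSON:
*
* ```js
* const textures = {};
* textures[ myTexture.uuid ] = myTexture;
* loader.setTextures( textures ); // `loader` is a MaterialLoader instance
* ```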
*/ setTextures( value ) { this.textures = value; return this; } /** * Creates a material for the given type. * * @param {string} type - The material type. * @return {Material} The new material. */ createMaterialFromType( type ) { return MaterialLoader.createMaterialFromType( type ); } /** * Creates a material for the given type. * * @static * @param {string} type - The material type. * @return {Material} The new material. */ static createMaterialFromType( type ) { const materialLib = { ShadowMaterial, SpriteMaterial, RawShaderMaterial, ShaderMaterial, PointsMaterial, MeshPhysicalMaterial, MeshStandardMaterial, MeshPhongMaterial, MeshToonMaterial, MeshNormalMaterial, MeshLambertMaterial, MeshDepthMaterial, MeshDistanceMaterial, MeshBasicMaterial, MeshMatcapMaterial, LineDashedMaterial, LineBasicMaterial, Material }; return new materialLib[ type ](); } } /** * A class with loader utility functions. */ class LoaderUtils { /** * Extracts the base URL from the given URL. * * @param {string} url -The URL to extract the base URL from. * @return {string} The extracted base URL. */ static extractUrlBase( url ) { const index = url.lastIndexOf( '/' ); if ( index === -1 ) return './'; return url.slice( 0, index + 1 ); } /** * Resolves relative URLs against the given path. Absolute paths, data urls, * and blob URLs will be returned as is. Invalid URLs will return an empty * string. * * @param {string} url -The URL to resolve. * @param {string} path - The base path for relative URLs to be resolved against. * @return {string} The resolved URL. */ static resolveURL( url, path ) { // Invalid URL if ( typeof url !== 'string' || url === '' ) return ''; // Host Relative URL if ( /^https?:\/\//i.test( path ) && /^\//.test( url ) ) { path = path.replace( /(^https?:\/\/[^\/]+).*/i, '$1' ); } // Absolute URL http://,https://,// if ( /^(https?:)?\/\//i.test( url ) ) return url; // Data URI if ( /^data:.*,.*$/i.test( url ) ) return url; // Blob URL if ( /^blob:.*$/i.test( url ) ) return url; // Relative URL return path + url; } } /** * An instanced version of a geometry. */ class InstancedBufferGeometry extends BufferGeometry { /** * Constructs a new instanced buffer geometry. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInstancedBufferGeometry = true; this.type = 'InstancedBufferGeometry'; /** * The instance count. * * @type {number} * @default Infinity */ this.instanceCount = Infinity; } copy( source ) { super.copy( source ); this.instanceCount = source.instanceCount; return this; } toJSON() { const data = super.toJSON(); data.instanceCount = this.instanceCount; data.isInstancedBufferGeometry = true; return data; } } /** * Class for loading geometries. The files are internally * loaded via {@link FileLoader}. * * ```js * const loader = new THREE.BufferGeometryLoader(); * const geometry = await loader.loadAsync( 'models/json/pressure.json' ); * * const material = new THREE.MeshBasicMaterial( { color: 0xF5F5F5 } ); * const object = new THREE.Mesh( geometry, material ); * scene.add( object ); * ``` * * @augments Loader */ class BufferGeometryLoader extends Loader { /** * Constructs a new geometry loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and pass the loaded geometry to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. 
* @param {function(BufferGeometry)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. */ load( url, onLoad, onProgress, onError ) { const scope = this; const loader = new FileLoader( scope.manager ); loader.setPath( scope.path ); loader.setRequestHeader( scope.requestHeader ); loader.setWithCredentials( scope.withCredentials ); loader.load( url, function ( text ) { try { onLoad( scope.parse( JSON.parse( text ) ) ); } catch ( e ) { if ( onError ) { onError( e ); } else { console.error( e ); } scope.manager.itemError( url ); } }, onProgress, onError ); } /** * Parses the given JSON object and returns a geometry. * * @param {Object} json - The serialized geometry. * @return {BufferGeometry} The parsed geometry. */ parse( json ) { const interleavedBufferMap = {}; const arrayBufferMap = {}; function getInterleavedBuffer( json, uuid ) { if ( interleavedBufferMap[ uuid ] !== undefined ) return interleavedBufferMap[ uuid ]; const interleavedBuffers = json.interleavedBuffers; const interleavedBuffer = interleavedBuffers[ uuid ]; const buffer = getArrayBuffer( json, interleavedBuffer.buffer ); const array = getTypedArray( interleavedBuffer.type, buffer ); const ib = new InterleavedBuffer( array, interleavedBuffer.stride ); ib.uuid = interleavedBuffer.uuid; interleavedBufferMap[ uuid ] = ib; return ib; } function getArrayBuffer( json, uuid ) { if ( arrayBufferMap[ uuid ] !== undefined ) return arrayBufferMap[ uuid ]; const arrayBuffers = json.arrayBuffers; const arrayBuffer = arrayBuffers[ uuid ]; const ab = new Uint32Array( arrayBuffer ).buffer; arrayBufferMap[ uuid ] = ab; return ab; } const geometry = json.isInstancedBufferGeometry ? new InstancedBufferGeometry() : new BufferGeometry(); const index = json.data.index; if ( index !== undefined ) { const typedArray = getTypedArray( index.type, index.array ); geometry.setIndex( new BufferAttribute( typedArray, 1 ) ); } const attributes = json.data.attributes; for ( const key in attributes ) { const attribute = attributes[ key ]; let bufferAttribute; if ( attribute.isInterleavedBufferAttribute ) { const interleavedBuffer = getInterleavedBuffer( json.data, attribute.data ); bufferAttribute = new InterleavedBufferAttribute( interleavedBuffer, attribute.itemSize, attribute.offset, attribute.normalized ); } else { const typedArray = getTypedArray( attribute.type, attribute.array ); const bufferAttributeConstr = attribute.isInstancedBufferAttribute ? 
InstancedBufferAttribute : BufferAttribute; bufferAttribute = new bufferAttributeConstr( typedArray, attribute.itemSize, attribute.normalized ); } if ( attribute.name !== undefined ) bufferAttribute.name = attribute.name; if ( attribute.usage !== undefined ) bufferAttribute.setUsage( attribute.usage ); geometry.setAttribute( key, bufferAttribute ); } const morphAttributes = json.data.morphAttributes; if ( morphAttributes ) { for ( const key in morphAttributes ) { const attributeArray = morphAttributes[ key ]; const array = []; for ( let i = 0, il = attributeArray.length; i < il; i ++ ) { const attribute = attributeArray[ i ]; let bufferAttribute; if ( attribute.isInterleavedBufferAttribute ) { const interleavedBuffer = getInterleavedBuffer( json.data, attribute.data ); bufferAttribute = new InterleavedBufferAttribute( interleavedBuffer, attribute.itemSize, attribute.offset, attribute.normalized ); } else { const typedArray = getTypedArray( attribute.type, attribute.array ); bufferAttribute = new BufferAttribute( typedArray, attribute.itemSize, attribute.normalized ); } if ( attribute.name !== undefined ) bufferAttribute.name = attribute.name; array.push( bufferAttribute ); } geometry.morphAttributes[ key ] = array; } } const morphTargetsRelative = json.data.morphTargetsRelative; if ( morphTargetsRelative ) { geometry.morphTargetsRelative = true; } const groups = json.data.groups || json.data.drawcalls || json.data.offsets; if ( groups !== undefined ) { for ( let i = 0, n = groups.length; i !== n; ++ i ) { const group = groups[ i ]; geometry.addGroup( group.start, group.count, group.materialIndex ); } } const boundingSphere = json.data.boundingSphere; if ( boundingSphere !== undefined ) { const center = new Vector3(); if ( boundingSphere.center !== undefined ) { center.fromArray( boundingSphere.center ); } geometry.boundingSphere = new Sphere( center, boundingSphere.radius ); } if ( json.name ) geometry.name = json.name; if ( json.userData ) geometry.userData = json.userData; return geometry; } } /** * A loader for loading a JSON resource in the [JSON Object/Scene format]{@link https://github.com/mrdoob/three.js/wiki/JSON-Object-Scene-format-4}. * The files are internally loaded via {@link FileLoader}. * * ```js * const loader = new THREE.ObjectLoader(); * const obj = await loader.loadAsync( 'models/json/example.json' ); * scene.add( obj ); * * // Alternatively, to parse a previously loaded JSON structure * const object = await loader.parseAsync( a_json_object ); * scene.add( object ); * ``` * * @augments Loader */ class ObjectLoader extends Loader { /** * Constructs a new object loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and pass the loaded 3D object to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(Object3D)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. */ load( url, onLoad, onProgress, onError ) { const scope = this; const path = ( this.path === '' ) ? 
LoaderUtils.extractUrlBase( url ) : this.path; this.resourcePath = this.resourcePath || path; const loader = new FileLoader( this.manager ); loader.setPath( this.path ); loader.setRequestHeader( this.requestHeader ); loader.setWithCredentials( this.withCredentials ); loader.load( url, function ( text ) { let json = null; try { json = JSON.parse( text ); } catch ( error ) { if ( onError !== undefined ) onError( error ); console.error( 'THREE:ObjectLoader: Can\'t parse ' + url + '.', error.message ); return; } const metadata = json.metadata; if ( metadata === undefined || metadata.type === undefined || metadata.type.toLowerCase() === 'geometry' ) { if ( onError !== undefined ) onError( new Error( 'THREE.ObjectLoader: Can\'t load ' + url ) ); console.error( 'THREE.ObjectLoader: Can\'t load ' + url ); return; } scope.parse( json, onLoad ); }, onProgress, onError ); } /** * Async version of {@link ObjectLoader#load}. * * @async * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @return {Promise} A Promise that resolves with the loaded 3D object. */ async loadAsync( url, onProgress ) { const scope = this; const path = ( this.path === '' ) ? LoaderUtils.extractUrlBase( url ) : this.path; this.resourcePath = this.resourcePath || path; const loader = new FileLoader( this.manager ); loader.setPath( this.path ); loader.setRequestHeader( this.requestHeader ); loader.setWithCredentials( this.withCredentials ); const text = await loader.loadAsync( url, onProgress ); const json = JSON.parse( text ); const metadata = json.metadata; if ( metadata === undefined || metadata.type === undefined || metadata.type.toLowerCase() === 'geometry' ) { throw new Error( 'THREE.ObjectLoader: Can\'t load ' + url ); } return await scope.parseAsync( json ); } /** * Parses the given JSON. This is used internally by {@link ObjectLoader#load} * but can also be used directly to parse a previously loaded JSON structure. * * @param {Object} json - The serialized 3D object. * @param {onLoad} onLoad - Executed when all resources (e.g. textures) have been fully loaded. * @return {Object3D} The parsed 3D object. */ parse( json, onLoad ) { const animations = this.parseAnimations( json.animations ); const shapes = this.parseShapes( json.shapes ); const geometries = this.parseGeometries( json.geometries, shapes ); const images = this.parseImages( json.images, function () { if ( onLoad !== undefined ) onLoad( object ); } ); const textures = this.parseTextures( json.textures, images ); const materials = this.parseMaterials( json.materials, textures ); const object = this.parseObject( json.object, geometries, materials, textures, animations ); const skeletons = this.parseSkeletons( json.skeletons, object ); this.bindSkeletons( object, skeletons ); this.bindLightTargets( object ); // if ( onLoad !== undefined ) { let hasImages = false; for ( const uuid in images ) { if ( images[ uuid ].data instanceof HTMLImageElement ) { hasImages = true; break; } } if ( hasImages === false ) onLoad( object ); } return object; } /** * Async version of {@link ObjectLoader#parse}. * * @param {Object} json - The serialized 3D object. * @return {Promise} A Promise that resolves with the parsed 3D object. 
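*
* A typical round-trip sketch (names are illustrative only):
*
* ```js
* const loader = new THREE.ObjectLoader();
* const json = scene.toJSON(); // JSON Object/Scene format
* const copy = await loader.parseAsync( json );
* ```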
*/ async parseAsync( json ) { const animations = this.parseAnimations( json.animations ); const shapes = this.parseShapes( json.shapes ); const geometries = this.parseGeometries( json.geometries, shapes ); const images = await this.parseImagesAsync( json.images ); const textures = this.parseTextures( json.textures, images ); const materials = this.parseMaterials( json.materials, textures ); const object = this.parseObject( json.object, geometries, materials, textures, animations ); const skeletons = this.parseSkeletons( json.skeletons, object ); this.bindSkeletons( object, skeletons ); this.bindLightTargets( object ); return object; } // internals parseShapes( json ) { const shapes = {}; if ( json !== undefined ) { for ( let i = 0, l = json.length; i < l; i ++ ) { const shape = new Shape().fromJSON( json[ i ] ); shapes[ shape.uuid ] = shape; } } return shapes; } parseSkeletons( json, object ) { const skeletons = {}; const bones = {}; // generate bone lookup table object.traverse( function ( child ) { if ( child.isBone ) bones[ child.uuid ] = child; } ); // create skeletons if ( json !== undefined ) { for ( let i = 0, l = json.length; i < l; i ++ ) { const skeleton = new Skeleton().fromJSON( json[ i ], bones ); skeletons[ skeleton.uuid ] = skeleton; } } return skeletons; } parseGeometries( json, shapes ) { const geometries = {}; if ( json !== undefined ) { const bufferGeometryLoader = new BufferGeometryLoader(); for ( let i = 0, l = json.length; i < l; i ++ ) { let geometry; const data = json[ i ]; switch ( data.type ) { case 'BufferGeometry': case 'InstancedBufferGeometry': geometry = bufferGeometryLoader.parse( data ); break; default: if ( data.type in Geometries ) { geometry = Geometries[ data.type ].fromJSON( data, shapes ); } else { console.warn( `THREE.ObjectLoader: Unsupported geometry type "${ data.type }"` ); } } geometry.uuid = data.uuid; if ( data.name !== undefined ) geometry.name = data.name; if ( data.userData !== undefined ) geometry.userData = data.userData; geometries[ data.uuid ] = geometry; } } return geometries; } parseMaterials( json, textures ) { const cache = {}; // MultiMaterial const materials = {}; if ( json !== undefined ) { const loader = new MaterialLoader(); loader.setTextures( textures ); for ( let i = 0, l = json.length; i < l; i ++ ) { const data = json[ i ]; if ( cache[ data.uuid ] === undefined ) { cache[ data.uuid ] = loader.parse( data ); } materials[ data.uuid ] = cache[ data.uuid ]; } } return materials; } parseAnimations( json ) { const animations = {}; if ( json !== undefined ) { for ( let i = 0; i < json.length; i ++ ) { const data = json[ i ]; const clip = AnimationClip.parse( data ); animations[ clip.uuid ] = clip; } } return animations; } parseImages( json, onLoad ) { const scope = this; const images = {}; let loader; function loadImage( url ) { scope.manager.itemStart( url ); return loader.load( url, function () { scope.manager.itemEnd( url ); }, undefined, function () { scope.manager.itemError( url ); scope.manager.itemEnd( url ); } ); } function deserializeImage( image ) { if ( typeof image === 'string' ) { const url = image; const path = /^(\/\/)|([a-z]+:(\/\/)?)/i.test( url ) ? 
url : scope.resourcePath + url; return loadImage( path ); } else { if ( image.data ) { return { data: getTypedArray( image.type, image.data ), width: image.width, height: image.height }; } else { return null; } } } if ( json !== undefined && json.length > 0 ) { const manager = new LoadingManager( onLoad ); loader = new ImageLoader( manager ); loader.setCrossOrigin( this.crossOrigin ); for ( let i = 0, il = json.length; i < il; i ++ ) { const image = json[ i ]; const url = image.url; if ( Array.isArray( url ) ) { // load array of images e.g CubeTexture const imageArray = []; for ( let j = 0, jl = url.length; j < jl; j ++ ) { const currentUrl = url[ j ]; const deserializedImage = deserializeImage( currentUrl ); if ( deserializedImage !== null ) { if ( deserializedImage instanceof HTMLImageElement ) { imageArray.push( deserializedImage ); } else { // special case: handle array of data textures for cube textures imageArray.push( new DataTexture( deserializedImage.data, deserializedImage.width, deserializedImage.height ) ); } } } images[ image.uuid ] = new Source( imageArray ); } else { // load single image const deserializedImage = deserializeImage( image.url ); images[ image.uuid ] = new Source( deserializedImage ); } } } return images; } async parseImagesAsync( json ) { const scope = this; const images = {}; let loader; async function deserializeImage( image ) { if ( typeof image === 'string' ) { const url = image; const path = /^(\/\/)|([a-z]+:(\/\/)?)/i.test( url ) ? url : scope.resourcePath + url; return await loader.loadAsync( path ); } else { if ( image.data ) { return { data: getTypedArray( image.type, image.data ), width: image.width, height: image.height }; } else { return null; } } } if ( json !== undefined && json.length > 0 ) { loader = new ImageLoader( this.manager ); loader.setCrossOrigin( this.crossOrigin ); for ( let i = 0, il = json.length; i < il; i ++ ) { const image = json[ i ]; const url = image.url; if ( Array.isArray( url ) ) { // load array of images e.g CubeTexture const imageArray = []; for ( let j = 0, jl = url.length; j < jl; j ++ ) { const currentUrl = url[ j ]; const deserializedImage = await deserializeImage( currentUrl ); if ( deserializedImage !== null ) { if ( deserializedImage instanceof HTMLImageElement ) { imageArray.push( deserializedImage ); } else { // special case: handle array of data textures for cube textures imageArray.push( new DataTexture( deserializedImage.data, deserializedImage.width, deserializedImage.height ) ); } } } images[ image.uuid ] = new Source( imageArray ); } else { // load single image const deserializedImage = await deserializeImage( image.url ); images[ image.uuid ] = new Source( deserializedImage ); } } } return images; } parseTextures( json, images ) { function parseConstant( value, type ) { if ( typeof value === 'number' ) return value; console.warn( 'THREE.ObjectLoader.parseTexture: Constant should be in numeric form.', value ); return type[ value ]; } const textures = {}; if ( json !== undefined ) { for ( let i = 0, l = json.length; i < l; i ++ ) { const data = json[ i ]; if ( data.image === undefined ) { console.warn( 'THREE.ObjectLoader: No "image" specified for', data.uuid ); } if ( images[ data.image ] === undefined ) { console.warn( 'THREE.ObjectLoader: Undefined image', data.image ); } const source = images[ data.image ]; const image = source.data; let texture; if ( Array.isArray( image ) ) { texture = new CubeTexture(); if ( image.length === 6 ) texture.needsUpdate = true; } else { if ( image && image.data ) { texture 
= new DataTexture(); } else { texture = new Texture(); } if ( image ) texture.needsUpdate = true; // textures can have undefined image data } texture.source = source; texture.uuid = data.uuid; if ( data.name !== undefined ) texture.name = data.name; if ( data.mapping !== undefined ) texture.mapping = parseConstant( data.mapping, TEXTURE_MAPPING ); if ( data.channel !== undefined ) texture.channel = data.channel; if ( data.offset !== undefined ) texture.offset.fromArray( data.offset ); if ( data.repeat !== undefined ) texture.repeat.fromArray( data.repeat ); if ( data.center !== undefined ) texture.center.fromArray( data.center ); if ( data.rotation !== undefined ) texture.rotation = data.rotation; if ( data.wrap !== undefined ) { texture.wrapS = parseConstant( data.wrap[ 0 ], TEXTURE_WRAPPING ); texture.wrapT = parseConstant( data.wrap[ 1 ], TEXTURE_WRAPPING ); } if ( data.format !== undefined ) texture.format = data.format; if ( data.internalFormat !== undefined ) texture.internalFormat = data.internalFormat; if ( data.type !== undefined ) texture.type = data.type; if ( data.colorSpace !== undefined ) texture.colorSpace = data.colorSpace; if ( data.minFilter !== undefined ) texture.minFilter = parseConstant( data.minFilter, TEXTURE_FILTER ); if ( data.magFilter !== undefined ) texture.magFilter = parseConstant( data.magFilter, TEXTURE_FILTER ); if ( data.anisotropy !== undefined ) texture.anisotropy = data.anisotropy; if ( data.flipY !== undefined ) texture.flipY = data.flipY; if ( data.generateMipmaps !== undefined ) texture.generateMipmaps = data.generateMipmaps; if ( data.premultiplyAlpha !== undefined ) texture.premultiplyAlpha = data.premultiplyAlpha; if ( data.unpackAlignment !== undefined ) texture.unpackAlignment = data.unpackAlignment; if ( data.compareFunction !== undefined ) texture.compareFunction = data.compareFunction; if ( data.userData !== undefined ) texture.userData = data.userData; textures[ data.uuid ] = texture; } } return textures; } parseObject( data, geometries, materials, textures, animations ) { let object; function getGeometry( name ) { if ( geometries[ name ] === undefined ) { console.warn( 'THREE.ObjectLoader: Undefined geometry', name ); } return geometries[ name ]; } function getMaterial( name ) { if ( name === undefined ) return undefined; if ( Array.isArray( name ) ) { const array = []; for ( let i = 0, l = name.length; i < l; i ++ ) { const uuid = name[ i ]; if ( materials[ uuid ] === undefined ) { console.warn( 'THREE.ObjectLoader: Undefined material', uuid ); } array.push( materials[ uuid ] ); } return array; } if ( materials[ name ] === undefined ) { console.warn( 'THREE.ObjectLoader: Undefined material', name ); } return materials[ name ]; } function getTexture( uuid ) { if ( textures[ uuid ] === undefined ) { console.warn( 'THREE.ObjectLoader: Undefined texture', uuid ); } return textures[ uuid ]; } let geometry, material; switch ( data.type ) { case 'Scene': object = new Scene(); if ( data.background !== undefined ) { if ( Number.isInteger( data.background ) ) { object.background = new Color( data.background ); } else { object.background = getTexture( data.background ); } } if ( data.environment !== undefined ) { object.environment = getTexture( data.environment ); } if ( data.fog !== undefined ) { if ( data.fog.type === 'Fog' ) { object.fog = new Fog( data.fog.color, data.fog.near, data.fog.far ); } else if ( data.fog.type === 'FogExp2' ) { object.fog = new FogExp2( data.fog.color, data.fog.density ); } if ( data.fog.name !== '' ) { object.fog.name 
= data.fog.name; } } if ( data.backgroundBlurriness !== undefined ) object.backgroundBlurriness = data.backgroundBlurriness; if ( data.backgroundIntensity !== undefined ) object.backgroundIntensity = data.backgroundIntensity; if ( data.backgroundRotation !== undefined ) object.backgroundRotation.fromArray( data.backgroundRotation ); if ( data.environmentIntensity !== undefined ) object.environmentIntensity = data.environmentIntensity; if ( data.environmentRotation !== undefined ) object.environmentRotation.fromArray( data.environmentRotation ); break; case 'PerspectiveCamera': object = new PerspectiveCamera( data.fov, data.aspect, data.near, data.far ); if ( data.focus !== undefined ) object.focus = data.focus; if ( data.zoom !== undefined ) object.zoom = data.zoom; if ( data.filmGauge !== undefined ) object.filmGauge = data.filmGauge; if ( data.filmOffset !== undefined ) object.filmOffset = data.filmOffset; if ( data.view !== undefined ) object.view = Object.assign( {}, data.view ); break; case 'OrthographicCamera': object = new OrthographicCamera( data.left, data.right, data.top, data.bottom, data.near, data.far ); if ( data.zoom !== undefined ) object.zoom = data.zoom; if ( data.view !== undefined ) object.view = Object.assign( {}, data.view ); break; case 'AmbientLight': object = new AmbientLight( data.color, data.intensity ); break; case 'DirectionalLight': object = new DirectionalLight( data.color, data.intensity ); object.target = data.target || ''; break; case 'PointLight': object = new PointLight( data.color, data.intensity, data.distance, data.decay ); break; case 'RectAreaLight': object = new RectAreaLight( data.color, data.intensity, data.width, data.height ); break; case 'SpotLight': object = new SpotLight( data.color, data.intensity, data.distance, data.angle, data.penumbra, data.decay ); object.target = data.target || ''; break; case 'HemisphereLight': object = new HemisphereLight( data.color, data.groundColor, data.intensity ); break; case 'LightProbe': object = new LightProbe().fromJSON( data ); break; case 'SkinnedMesh': geometry = getGeometry( data.geometry ); material = getMaterial( data.material ); object = new SkinnedMesh( geometry, material ); if ( data.bindMode !== undefined ) object.bindMode = data.bindMode; if ( data.bindMatrix !== undefined ) object.bindMatrix.fromArray( data.bindMatrix ); if ( data.skeleton !== undefined ) object.skeleton = data.skeleton; break; case 'Mesh': geometry = getGeometry( data.geometry ); material = getMaterial( data.material ); object = new Mesh( geometry, material ); break; case 'InstancedMesh': geometry = getGeometry( data.geometry ); material = getMaterial( data.material ); const count = data.count; const instanceMatrix = data.instanceMatrix; const instanceColor = data.instanceColor; object = new InstancedMesh( geometry, material, count ); object.instanceMatrix = new InstancedBufferAttribute( new Float32Array( instanceMatrix.array ), 16 ); if ( instanceColor !== undefined ) object.instanceColor = new InstancedBufferAttribute( new Float32Array( instanceColor.array ), instanceColor.itemSize ); break; case 'BatchedMesh': geometry = getGeometry( data.geometry ); material = getMaterial( data.material ); object = new BatchedMesh( data.maxInstanceCount, data.maxVertexCount, data.maxIndexCount, material ); object.geometry = geometry; object.perObjectFrustumCulled = data.perObjectFrustumCulled; object.sortObjects = data.sortObjects; object._drawRanges = data.drawRanges; object._reservedRanges = data.reservedRanges; object._visibility = 
data.visibility; object._active = data.active; object._bounds = data.bounds.map( bound => { const box = new Box3(); box.min.fromArray( bound.boxMin ); box.max.fromArray( bound.boxMax ); const sphere = new Sphere(); sphere.radius = bound.sphereRadius; sphere.center.fromArray( bound.sphereCenter ); return { boxInitialized: bound.boxInitialized, box: box, sphereInitialized: bound.sphereInitialized, sphere: sphere }; } ); object._maxInstanceCount = data.maxInstanceCount; object._maxVertexCount = data.maxVertexCount; object._maxIndexCount = data.maxIndexCount; object._geometryInitialized = data.geometryInitialized; object._geometryCount = data.geometryCount; object._matricesTexture = getTexture( data.matricesTexture.uuid ); if ( data.colorsTexture !== undefined ) object._colorsTexture = getTexture( data.colorsTexture.uuid ); break; case 'LOD': object = new LOD(); break; case 'Line': object = new Line( getGeometry( data.geometry ), getMaterial( data.material ) ); break; case 'LineLoop': object = new LineLoop( getGeometry( data.geometry ), getMaterial( data.material ) ); break; case 'LineSegments': object = new LineSegments( getGeometry( data.geometry ), getMaterial( data.material ) ); break; case 'PointCloud': case 'Points': object = new Points( getGeometry( data.geometry ), getMaterial( data.material ) ); break; case 'Sprite': object = new Sprite( getMaterial( data.material ) ); break; case 'Group': object = new Group(); break; case 'Bone': object = new Bone(); break; default: object = new Object3D(); } object.uuid = data.uuid; if ( data.name !== undefined ) object.name = data.name; if ( data.matrix !== undefined ) { object.matrix.fromArray( data.matrix ); if ( data.matrixAutoUpdate !== undefined ) object.matrixAutoUpdate = data.matrixAutoUpdate; if ( object.matrixAutoUpdate ) object.matrix.decompose( object.position, object.quaternion, object.scale ); } else { if ( data.position !== undefined ) object.position.fromArray( data.position ); if ( data.rotation !== undefined ) object.rotation.fromArray( data.rotation ); if ( data.quaternion !== undefined ) object.quaternion.fromArray( data.quaternion ); if ( data.scale !== undefined ) object.scale.fromArray( data.scale ); } if ( data.up !== undefined ) object.up.fromArray( data.up ); if ( data.castShadow !== undefined ) object.castShadow = data.castShadow; if ( data.receiveShadow !== undefined ) object.receiveShadow = data.receiveShadow; if ( data.shadow ) { if ( data.shadow.intensity !== undefined ) object.shadow.intensity = data.shadow.intensity; if ( data.shadow.bias !== undefined ) object.shadow.bias = data.shadow.bias; if ( data.shadow.normalBias !== undefined ) object.shadow.normalBias = data.shadow.normalBias; if ( data.shadow.radius !== undefined ) object.shadow.radius = data.shadow.radius; if ( data.shadow.mapSize !== undefined ) object.shadow.mapSize.fromArray( data.shadow.mapSize ); if ( data.shadow.camera !== undefined ) object.shadow.camera = this.parseObject( data.shadow.camera ); } if ( data.visible !== undefined ) object.visible = data.visible; if ( data.frustumCulled !== undefined ) object.frustumCulled = data.frustumCulled; if ( data.renderOrder !== undefined ) object.renderOrder = data.renderOrder; if ( data.userData !== undefined ) object.userData = data.userData; if ( data.layers !== undefined ) object.layers.mask = data.layers; if ( data.children !== undefined ) { const children = data.children; for ( let i = 0; i < children.length; i ++ ) { object.add( this.parseObject( children[ i ], geometries, materials, textures, 
animations ) ); } } if ( data.animations !== undefined ) { const objectAnimations = data.animations; for ( let i = 0; i < objectAnimations.length; i ++ ) { const uuid = objectAnimations[ i ]; object.animations.push( animations[ uuid ] ); } } if ( data.type === 'LOD' ) { if ( data.autoUpdate !== undefined ) object.autoUpdate = data.autoUpdate; const levels = data.levels; for ( let l = 0; l < levels.length; l ++ ) { const level = levels[ l ]; const child = object.getObjectByProperty( 'uuid', level.object ); if ( child !== undefined ) { object.addLevel( child, level.distance, level.hysteresis ); } } } return object; } bindSkeletons( object, skeletons ) { if ( Object.keys( skeletons ).length === 0 ) return; object.traverse( function ( child ) { if ( child.isSkinnedMesh === true && child.skeleton !== undefined ) { const skeleton = skeletons[ child.skeleton ]; if ( skeleton === undefined ) { console.warn( 'THREE.ObjectLoader: No skeleton found with UUID:', child.skeleton ); } else { child.bind( skeleton, child.bindMatrix ); } } } ); } bindLightTargets( object ) { object.traverse( function ( child ) { if ( child.isDirectionalLight || child.isSpotLight ) { const uuid = child.target; const target = object.getObjectByProperty( 'uuid', uuid ); if ( target !== undefined ) { child.target = target; } else { child.target = new Object3D(); } } } ); } } const TEXTURE_MAPPING = { UVMapping: UVMapping, CubeReflectionMapping: CubeReflectionMapping, CubeRefractionMapping: CubeRefractionMapping, EquirectangularReflectionMapping: EquirectangularReflectionMapping, EquirectangularRefractionMapping: EquirectangularRefractionMapping, CubeUVReflectionMapping: CubeUVReflectionMapping }; const TEXTURE_WRAPPING = { RepeatWrapping: RepeatWrapping, ClampToEdgeWrapping: ClampToEdgeWrapping, MirroredRepeatWrapping: MirroredRepeatWrapping }; const TEXTURE_FILTER = { NearestFilter: NearestFilter, NearestMipmapNearestFilter: NearestMipmapNearestFilter, NearestMipmapLinearFilter: NearestMipmapLinearFilter, LinearFilter: LinearFilter, LinearMipmapNearestFilter: LinearMipmapNearestFilter, LinearMipmapLinearFilter: LinearMipmapLinearFilter }; /** * A loader for loading images as an [ImageBitmap]{@link https://developer.mozilla.org/en-US/docs/Web/API/ImageBitmap}. * An `ImageBitmap` provides an asynchronous and resource-efficient pathway to prepare * textures for rendering. * * Note that {@link Texture#flipY} and {@link Texture#premultiplyAlpha} are ignored with image bitmaps: * these settings must be applied when the bitmap is created, whereas regular images apply them when the texture is uploaded to the GPU. * * You need to set the equivalent options via {@link ImageBitmapLoader#setOptions} instead. * * Also note that unlike {@link FileLoader}, this loader does not avoid multiple concurrent requests to the same URL. * * ```js * const loader = new THREE.ImageBitmapLoader(); * loader.setOptions( { imageOrientation: 'flipY' } ); // set options if needed * const imageBitmap = await loader.loadAsync( 'image.png' ); * * const texture = new THREE.Texture( imageBitmap ); * texture.needsUpdate = true; * ``` * * @augments Loader */ class ImageBitmapLoader extends Loader { /** * Constructs a new image bitmap loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); /** * This flag can be used for type testing.
* * @type {boolean} * @readonly * @default true */ this.isImageBitmapLoader = true; if ( typeof createImageBitmap === 'undefined' ) { console.warn( 'THREE.ImageBitmapLoader: createImageBitmap() not supported.' ); } if ( typeof fetch === 'undefined' ) { console.warn( 'THREE.ImageBitmapLoader: fetch() not supported.' ); } /** * Represents the loader options. * * @type {Object} * @default {premultiplyAlpha:'none'} */ this.options = { premultiplyAlpha: 'none' }; } /** * Sets the given loader options. The structure of the object must match the `options` parameter of * [createImageBitmap]{@link https://developer.mozilla.org/en-US/docs/Web/API/Window/createImageBitmap}. * * @param {Object} options - The loader options to set. * @return {ImageBitmapLoader} A reference to this image bitmap loader. */ setOptions( options ) { this.options = options; return this; } /** * Starts loading from the given URL and pass the loaded image bitmap to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(ImageBitmap)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Unsupported in this loader. * @param {onErrorCallback} onError - Executed when errors occur. * @return {ImageBitmap|undefined} The image bitmap. */ load( url, onLoad, onProgress, onError ) { if ( url === undefined ) url = ''; if ( this.path !== undefined ) url = this.path + url; url = this.manager.resolveURL( url ); const scope = this; const cached = Cache.get( url ); if ( cached !== undefined ) { scope.manager.itemStart( url ); // If cached is a promise, wait for it to resolve if ( cached.then ) { cached.then( imageBitmap => { if ( onLoad ) onLoad( imageBitmap ); scope.manager.itemEnd( url ); } ).catch( e => { if ( onError ) onError( e ); } ); return; } // If cached is not a promise (i.e., it's already an imageBitmap) setTimeout( function () { if ( onLoad ) onLoad( cached ); scope.manager.itemEnd( url ); }, 0 ); return cached; } const fetchOptions = {}; fetchOptions.credentials = ( this.crossOrigin === 'anonymous' ) ? 'same-origin' : 'include'; fetchOptions.headers = this.requestHeader; const promise = fetch( url, fetchOptions ).then( function ( res ) { return res.blob(); } ).then( function ( blob ) { return createImageBitmap( blob, Object.assign( scope.options, { colorSpaceConversion: 'none' } ) ); } ).then( function ( imageBitmap ) { Cache.add( url, imageBitmap ); if ( onLoad ) onLoad( imageBitmap ); scope.manager.itemEnd( url ); return imageBitmap; } ).catch( function ( e ) { if ( onError ) onError( e ); Cache.remove( url ); scope.manager.itemError( url ); scope.manager.itemEnd( url ); } ); Cache.add( url, promise ); scope.manager.itemStart( url ); } } let _context; /** * Manages the global audio context in the engine. * * @hideconstructor */ class AudioContext { /** * Returns the global native audio context. * * @return {AudioContext} The native audio context. */ static getContext() { if ( _context === undefined ) { _context = new ( window.AudioContext || window.webkitAudioContext )(); } return _context; } /** * Allows to set the global native audio context from outside. * * @param {AudioContext} value - The native context to set. */ static setContext( value ) { _context = value; } } /** * Class for loading audio buffers. Audios are internally * loaded via {@link FileLoader}. 
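*
* Note that browsers typically keep the shared audio context suspended until a user gesture occurs. A common pattern (a sketch only; the choice of event is up to the application) is to resume it on the first interaction:
*
* ```js
* document.body.addEventListener( 'click', () => {
* 	THREE.AudioContext.getContext().resume();
* }, { once: true } );
* ```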
* * ```js * const audioListener = new THREE.AudioListener(); * const ambientSound = new THREE.Audio( audioListener ); * * const loader = new THREE.AudioLoader(); * const audioBuffer = await loader.loadAsync( 'audio/ambient_ocean.ogg' ); * * ambientSound.setBuffer( audioBuffer ); * ambientSound.play(); * ``` * * @augments Loader */ class AudioLoader extends Loader { /** * Constructs a new audio loader. * * @param {LoadingManager} [manager] - The loading manager. */ constructor( manager ) { super( manager ); } /** * Starts loading from the given URL and passes the loaded audio buffer * to the `onLoad()` callback. * * @param {string} url - The path/URL of the file to be loaded. This can also be a data URI. * @param {function(AudioBuffer)} onLoad - Executed when the loading process has been finished. * @param {onProgressCallback} onProgress - Executed while the loading is in progress. * @param {onErrorCallback} onError - Executed when errors occur. */ load( url, onLoad, onProgress, onError ) { const scope = this; const loader = new FileLoader( this.manager ); loader.setResponseType( 'arraybuffer' ); loader.setPath( this.path ); loader.setRequestHeader( this.requestHeader ); loader.setWithCredentials( this.withCredentials ); loader.load( url, function ( buffer ) { try { // Create a copy of the buffer. The `decodeAudioData` method // detaches the buffer when complete, preventing reuse. const bufferCopy = buffer.slice( 0 ); const context = AudioContext.getContext(); context.decodeAudioData( bufferCopy, function ( audioBuffer ) { onLoad( audioBuffer ); } ).catch( handleError ); } catch ( e ) { handleError( e ); } }, onProgress, onError ); function handleError( e ) { if ( onError ) { onError( e ); } else { console.error( e ); } scope.manager.itemError( url ); } } } const _eyeRight = /*@__PURE__*/ new Matrix4(); const _eyeLeft = /*@__PURE__*/ new Matrix4(); const _projectionMatrix = /*@__PURE__*/ new Matrix4(); /** * A special type of camera that uses two perspective cameras with * stereoscopic projection. Can be used for rendering stereo effects * like [3D Anaglyph]{@link https://en.wikipedia.org/wiki/Anaglyph_3D} or * [Parallax Barrier]{@link https://en.wikipedia.org/wiki/parallax_barrier}. */ class StereoCamera { /** * Constructs a new stereo camera. */ constructor() { /** * The type property is used for detecting the object type * in context of serialization/deserialization. * * @type {string} * @readonly */ this.type = 'StereoCamera'; /** * The aspect. * * @type {number} * @default 1 */ this.aspect = 1; /** * The eye separation which represents the distance * between the left and right camera. * * @type {number} * @default 0.064 */ this.eyeSep = 0.064; /** * The camera representing the left eye. This is added to layer `1` so objects to be * rendered by the left camera must also be added to this layer. * * @type {PerspectiveCamera} */ this.cameraL = new PerspectiveCamera(); this.cameraL.layers.enable( 1 ); this.cameraL.matrixAutoUpdate = false; /** * The camera representing the right eye. This is added to layer `2` so objects to be * rendered by the right camera must also be added to this layer. * * @type {PerspectiveCamera} */ this.cameraR = new PerspectiveCamera(); this.cameraR.layers.enable( 2 ); this.cameraR.matrixAutoUpdate = false; this._cache = { focus: null, fov: null, aspect: null, near: null, far: null, zoom: null, eyeSep: null }; } /** * Updates the stereo camera based on the given perspective camera. * * @param {PerspectiveCamera} camera - The perspective camera. 
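 *
 * A minimal usage sketch of split-viewport stereo rendering (assumed names; `renderer`, `scene` and the perspective `camera` are not part of this class):
 * ```js
 * const stereo = new THREE.StereoCamera();
 *
 * // in the render loop
 * camera.updateMatrixWorld();
 * stereo.update( camera );
 *
 * const size = new THREE.Vector2();
 * renderer.getSize( size );
 *
 * renderer.setScissorTest( true );
 *
 * renderer.setScissor( 0, 0, size.x / 2, size.y );
 * renderer.setViewport( 0, 0, size.x / 2, size.y );
 * renderer.render( scene, stereo.cameraL );
 *
 * renderer.setScissor( size.x / 2, 0, size.x / 2, size.y );
 * renderer.setViewport( size.x / 2, 0, size.x / 2, size.y );
 * renderer.render( scene, stereo.cameraR );
 *
 * renderer.setScissorTest( false );
 * ```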
*/ update( camera ) { const cache = this._cache; const needsUpdate = cache.focus !== camera.focus || cache.fov !== camera.fov || cache.aspect !== camera.aspect * this.aspect || cache.near !== camera.near || cache.far !== camera.far || cache.zoom !== camera.zoom || cache.eyeSep !== this.eyeSep; if ( needsUpdate ) { cache.focus = camera.focus; cache.fov = camera.fov; cache.aspect = camera.aspect * this.aspect; cache.near = camera.near; cache.far = camera.far; cache.zoom = camera.zoom; cache.eyeSep = this.eyeSep; // Off-axis stereoscopic effect based on // http://paulbourke.net/stereographics/stereorender/ _projectionMatrix.copy( camera.projectionMatrix ); const eyeSepHalf = cache.eyeSep / 2; const eyeSepOnProjection = eyeSepHalf * cache.near / cache.focus; const ymax = ( cache.near * Math.tan( DEG2RAD * cache.fov * 0.5 ) ) / cache.zoom; let xmin, xmax; // translate xOffset _eyeLeft.elements[ 12 ] = - eyeSepHalf; _eyeRight.elements[ 12 ] = eyeSepHalf; // for left eye xmin = - ymax * cache.aspect + eyeSepOnProjection; xmax = ymax * cache.aspect + eyeSepOnProjection; _projectionMatrix.elements[ 0 ] = 2 * cache.near / ( xmax - xmin ); _projectionMatrix.elements[ 8 ] = ( xmax + xmin ) / ( xmax - xmin ); this.cameraL.projectionMatrix.copy( _projectionMatrix ); // for right eye xmin = - ymax * cache.aspect - eyeSepOnProjection; xmax = ymax * cache.aspect - eyeSepOnProjection; _projectionMatrix.elements[ 0 ] = 2 * cache.near / ( xmax - xmin ); _projectionMatrix.elements[ 8 ] = ( xmax + xmin ) / ( xmax - xmin ); this.cameraR.projectionMatrix.copy( _projectionMatrix ); } this.cameraL.matrixWorld.copy( camera.matrixWorld ).multiply( _eyeLeft ); this.cameraR.matrixWorld.copy( camera.matrixWorld ).multiply( _eyeRight ); } } /** * This type of camera can be used in order to efficiently render a scene with a * predefined set of cameras. This is an important performance aspect for * rendering VR scenes. * * An instance of `ArrayCamera` always has an array of sub cameras. It's mandatory * to define for each sub camera the `viewport` property which determines the * part of the viewport that is rendered with this camera. * * @augments PerspectiveCamera */ class ArrayCamera extends PerspectiveCamera { /** * Constructs a new array camera. * * @param {Array} [array=[]] - An array of perspective sub cameras. */ constructor( array = [] ) { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isArrayCamera = true; /** * An array of perspective sub cameras. * * @type {Array} */ this.cameras = array; this.index = 0; } } /** * Class for keeping track of time. */ class Clock { /** * Constructs a new clock. * * @param {boolean} [autoStart=true] - Whether to automatically start the clock when * `getDelta()` is called for the first time. */ constructor( autoStart = true ) { /** * If set to `true`, the clock starts automatically when `getDelta()` is called * for the first time. * * @type {boolean} * @default true */ this.autoStart = autoStart; /** * Holds the time at which the clock's `start()` method was last called. * * @type {number} * @default 0 */ this.startTime = 0; /** * Holds the time at which the clock's `start()`, `getElapsedTime()` or * `getDelta()` methods were last called. * * @type {number} * @default 0 */ this.oldTime = 0; /** * Keeps track of the total time that the clock has been running. * * @type {number} * @default 0 */ this.elapsedTime = 0; /** * Whether the clock is running or not. 
* * @type {boolean} * @default false */ this.running = false; } /** * Starts the clock. When `autoStart` is set to `true`, the method is automatically * called by the class. */ start() { this.startTime = now(); this.oldTime = this.startTime; this.elapsedTime = 0; this.running = true; } /** * Stops the clock. */ stop() { this.getElapsedTime(); this.running = false; this.autoStart = false; } /** * Returns the elapsed time in seconds. * * @return {number} The elapsed time. */ getElapsedTime() { this.getDelta(); return this.elapsedTime; } /** * Returns the delta time in seconds. * * @return {number} The delta time. */ getDelta() { let diff = 0; if ( this.autoStart && ! this.running ) { this.start(); return 0; } if ( this.running ) { const newTime = now(); diff = ( newTime - this.oldTime ) / 1000; this.oldTime = newTime; this.elapsedTime += diff; } return diff; } } function now() { return performance.now(); } const _position$1 = /*@__PURE__*/ new Vector3(); const _quaternion$1 = /*@__PURE__*/ new Quaternion(); const _scale$1 = /*@__PURE__*/ new Vector3(); const _orientation$1 = /*@__PURE__*/ new Vector3(); /** * The class represents a virtual listener of all positional and non-positional audio effects * in the scene. A three.js application usually creates a single listener. It is a mandatory * constructor parameter for audio entities like {@link Audio} and {@link PositionalAudio}. * * In most cases, the listener object is a child of the camera. So the 3D transformation of the * camera represents the 3D transformation of the listener. * * @augments Object3D */ class AudioListener extends Object3D { /** * Constructs a new audio listener. */ constructor() { super(); this.type = 'AudioListener'; /** * The native audio context. * * @type {AudioContext} * @readonly */ this.context = AudioContext.getContext(); /** * The gain node used for volume control. * * @type {GainNode} * @readonly */ this.gain = this.context.createGain(); this.gain.connect( this.context.destination ); /** * An optional filter. * * Defined via {@link AudioListener#setFilter}. * * @type {?AudioNode} * @default null * @readonly */ this.filter = null; /** * The time delta value required for `linearRampToValueAtTime()` usage. * * @type {number} * @default 0 * @readonly */ this.timeDelta = 0; // private this._clock = new Clock(); } /** * Returns the listener's input node. * * This method is used by other audio nodes to connect to this listener. * * @return {GainNode} The input node. */ getInput() { return this.gain; } /** * Removes the current filter from this listener. * * @return {AudioListener} A reference to this listener. */ removeFilter() { if ( this.filter !== null ) { this.gain.disconnect( this.filter ); this.filter.disconnect( this.context.destination ); this.gain.connect( this.context.destination ); this.filter = null; } return this; } /** * Returns the currently set filter. * * @return {?AudioNode} The filter. */ getFilter() { return this.filter; } /** * Sets the given filter to this listener. * * @param {AudioNode} value - The filter to set. * @return {AudioListener} A reference to this listener. */ setFilter( value ) { if ( this.filter !== null ) { this.gain.disconnect( this.filter ); this.filter.disconnect( this.context.destination ); } else { this.gain.disconnect( this.context.destination ); } this.filter = value; this.gain.connect( this.filter ); this.filter.connect( this.context.destination ); return this; } /** * Returns the application's master volume. * * @return {number} The master volume.
*/ getMasterVolume() { return this.gain.gain.value; } /** * Sets the applications master volume. This volume setting affects * all audio nodes in the scene. * * @param {number} value - The master volume to set. * @return {AudioListener} A reference to this listener. */ setMasterVolume( value ) { this.gain.gain.setTargetAtTime( value, this.context.currentTime, 0.01 ); return this; } updateMatrixWorld( force ) { super.updateMatrixWorld( force ); const listener = this.context.listener; const up = this.up; this.timeDelta = this._clock.getDelta(); this.matrixWorld.decompose( _position$1, _quaternion$1, _scale$1 ); _orientation$1.set( 0, 0, -1 ).applyQuaternion( _quaternion$1 ); if ( listener.positionX ) { // code path for Chrome (see #14393) const endTime = this.context.currentTime + this.timeDelta; listener.positionX.linearRampToValueAtTime( _position$1.x, endTime ); listener.positionY.linearRampToValueAtTime( _position$1.y, endTime ); listener.positionZ.linearRampToValueAtTime( _position$1.z, endTime ); listener.forwardX.linearRampToValueAtTime( _orientation$1.x, endTime ); listener.forwardY.linearRampToValueAtTime( _orientation$1.y, endTime ); listener.forwardZ.linearRampToValueAtTime( _orientation$1.z, endTime ); listener.upX.linearRampToValueAtTime( up.x, endTime ); listener.upY.linearRampToValueAtTime( up.y, endTime ); listener.upZ.linearRampToValueAtTime( up.z, endTime ); } else { listener.setPosition( _position$1.x, _position$1.y, _position$1.z ); listener.setOrientation( _orientation$1.x, _orientation$1.y, _orientation$1.z, up.x, up.y, up.z ); } } } /** * Represents a non-positional ( global ) audio object. * * This and related audio modules make use of the [Web Audio API]{@link https://www.w3.org/TR/webaudio-1.1/}. * * ```js * // create an AudioListener and add it to the camera * const listener = new THREE.AudioListener(); * camera.add( listener ); * * // create a global audio source * const sound = new THREE.Audio( listener ); * * // load a sound and set it as the Audio object's buffer * const audioLoader = new THREE.AudioLoader(); * audioLoader.load( 'sounds/ambient.ogg', function( buffer ) { * sound.setBuffer( buffer ); * sound.setLoop( true ); * sound.setVolume( 0.5 ); * sound.play(); * }); * ``` * * @augments Object3D */ class Audio extends Object3D { /** * Constructs a new audio. * * @param {AudioListener} listener - The global audio listener. */ constructor( listener ) { super(); this.type = 'Audio'; /** * The global audio listener. * * @type {AudioListener} * @readonly */ this.listener = listener; /** * The audio context. * * @type {AudioContext} * @readonly */ this.context = listener.context; /** * The gain node used for volume control. * * @type {GainNode} * @readonly */ this.gain = this.context.createGain(); this.gain.connect( listener.getInput() ); /** * Whether to start playback automatically or not. * * @type {boolean} * @default false */ this.autoplay = false; /** * A reference to an audio buffer. * * Defined via {@link Audio#setBuffer}. * * @type {?AudioBuffer} * @default null * @readonly */ this.buffer = null; /** * Modify pitch, measured in cents. +/- 100 is a semitone. * +/- 1200 is an octave. * * Defined via {@link Audio#setDetune}. * * @type {number} * @default 0 * @readonly */ this.detune = 0; /** * Whether the audio should loop or not. * * Defined via {@link Audio#setLoop}. * * @type {boolean} * @default false * @readonly */ this.loop = false; /** * Defines where in the audio buffer the replay should * start, in seconds. 
* * @type {number} * @default 0 */ this.loopStart = 0; /** * Defines where in the audio buffer the replay should * stop, in seconds. * * @type {number} * @default 0 */ this.loopEnd = 0; /** * An offset to the time within the audio buffer at which playback * should begin, in seconds. * * @type {number} * @default 0 */ this.offset = 0; /** * Overrides the default duration of the audio. * * @type {undefined|number} * @default undefined */ this.duration = undefined; /** * The playback speed. * * Defined via {@link Audio#setPlaybackRate}. * * @type {number} * @readonly * @default 1 */ this.playbackRate = 1; /** * Indicates whether the audio is playing or not. * * This flag will be automatically set when using {@link Audio#play}, * {@link Audio#pause}, {@link Audio#stop}. * * @type {boolean} * @readonly * @default false */ this.isPlaying = false; /** * Indicates whether the audio playback can be controlled * with methods like {@link Audio#play} or {@link Audio#pause}. * * This flag will be automatically set when audio sources are * defined. * * @type {boolean} * @readonly * @default true */ this.hasPlaybackControl = true; /** * Holds a reference to the current audio source. * * The property is automatically set by one of the `set*()` methods. * * @type {?AudioNode} * @readonly * @default null */ this.source = null; /** * Defines the source type. * * The property is automatically set by one of the `set*()` methods. * * @type {('empty'|'audioNode'|'mediaNode'|'mediaStreamNode'|'buffer')} * @readonly * @default 'empty' */ this.sourceType = 'empty'; this._startedAt = 0; this._progress = 0; this._connected = false; /** * Can be used to apply a variety of low-order filters to create * more complex sound effects e.g. via `BiquadFilterNode`. * * The property is automatically set by {@link Audio#setFilters}. * * @type {Array} * @readonly */ this.filters = []; } /** * Returns the output audio node. * * @return {GainNode} The output node. */ getOutput() { return this.gain; } /** * Sets the given audio node as the source of this instance. * * {@link Audio#sourceType} is set to `audioNode` and {@link Audio#hasPlaybackControl} to `false`. * * @param {AudioNode} audioNode - The audio node, e.g. an instance of `OscillatorNode`. * @return {Audio} A reference to this instance. */ setNodeSource( audioNode ) { this.hasPlaybackControl = false; this.sourceType = 'audioNode'; this.source = audioNode; this.connect(); return this; } /** * Sets the given media element as the source of this instance. * * {@link Audio#sourceType} is set to `mediaNode` and {@link Audio#hasPlaybackControl} to `false`. * * @param {HTMLMediaElement} mediaElement - The media element. * @return {Audio} A reference to this instance. */ setMediaElementSource( mediaElement ) { this.hasPlaybackControl = false; this.sourceType = 'mediaNode'; this.source = this.context.createMediaElementSource( mediaElement ); this.connect(); return this; } /** * Sets the given media stream as the source of this instance. * * {@link Audio#sourceType} is set to `mediaStreamNode` and {@link Audio#hasPlaybackControl} to `false`. * * @param {MediaStream} mediaStream - The media stream. * @return {Audio} A reference to this instance. */ setMediaStreamSource( mediaStream ) { this.hasPlaybackControl = false; this.sourceType = 'mediaStreamNode'; this.source = this.context.createMediaStreamSource( mediaStream ); this.connect(); return this; } /** * Sets the given audio buffer as the source of this instance.
* * {@link Audio#sourceType} is set to `buffer` and {@link Audio#hasPlaybackControl} to `true`. * * @param {AudioBuffer} audioBuffer - The audio buffer. * @return {Audio} A reference to this instance. */ setBuffer( audioBuffer ) { this.buffer = audioBuffer; this.sourceType = 'buffer'; if ( this.autoplay ) this.play(); return this; } /** * Starts the playback of the audio. * * Can only be used with compatible audio sources that allow playback control. * * @param {number} [delay=0] - The delay, in seconds, at which the audio should start playing. * @return {Audio|undefined} A reference to this instance. */ play( delay = 0 ) { if ( this.isPlaying === true ) { console.warn( 'THREE.Audio: Audio is already playing.' ); return; } if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' ); return; } this._startedAt = this.context.currentTime + delay; const source = this.context.createBufferSource(); source.buffer = this.buffer; source.loop = this.loop; source.loopStart = this.loopStart; source.loopEnd = this.loopEnd; source.onended = this.onEnded.bind( this ); source.start( this._startedAt, this._progress + this.offset, this.duration ); this.isPlaying = true; this.source = source; this.setDetune( this.detune ); this.setPlaybackRate( this.playbackRate ); return this.connect(); } /** * Pauses the playback of the audio. * * Can only be used with compatible audio sources that allow playback control. * * @return {Audio|undefined} A reference to this instance. */ pause() { if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' ); return; } if ( this.isPlaying === true ) { // update current progress this._progress += Math.max( this.context.currentTime - this._startedAt, 0 ) * this.playbackRate; if ( this.loop === true ) { // ensure _progress does not exceed duration with looped audios this._progress = this._progress % ( this.duration || this.buffer.duration ); } this.source.stop(); this.source.onended = null; this.isPlaying = false; } return this; } /** * Stops the playback of the audio. * * Can only be used with compatible audio sources that allow playback control. * * @param {number} [delay=0] - The delay, in seconds, at which the audio should stop playing. * @return {Audio|undefined} A reference to this instance. */ stop( delay = 0 ) { if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' ); return; } this._progress = 0; if ( this.source !== null ) { this.source.stop( this.context.currentTime + delay ); this.source.onended = null; } this.isPlaying = false; return this; } /** * Connects to the audio source. This is used internally on * initialisation and when setting / removing filters. * * @return {Audio} A reference to this instance. */ connect() { if ( this.filters.length > 0 ) { this.source.connect( this.filters[ 0 ] ); for ( let i = 1, l = this.filters.length; i < l; i ++ ) { this.filters[ i - 1 ].connect( this.filters[ i ] ); } this.filters[ this.filters.length - 1 ].connect( this.getOutput() ); } else { this.source.connect( this.getOutput() ); } this._connected = true; return this; } /** * Disconnects to the audio source. This is used internally on * initialisation and when setting / removing filters. * * @return {Audio|undefined} A reference to this instance. 
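 *
 * A minimal sketch of the filter chain that `connect()` / `disconnect()` manage (assuming `sound` is an {@link Audio} with a buffer already set):
 * ```js
 * const filter = sound.context.createBiquadFilter();
 * filter.type = 'lowpass';
 * filter.frequency.value = 800;
 *
 * // while the audio is connected, setFilters() disconnects, swaps the filter list and reconnects
 * sound.setFilters( [ filter ] );
 * ```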
*/ disconnect() { if ( this._connected === false ) { return; } if ( this.filters.length > 0 ) { this.source.disconnect( this.filters[ 0 ] ); for ( let i = 1, l = this.filters.length; i < l; i ++ ) { this.filters[ i - 1 ].disconnect( this.filters[ i ] ); } this.filters[ this.filters.length - 1 ].disconnect( this.getOutput() ); } else { this.source.disconnect( this.getOutput() ); } this._connected = false; return this; } /** * Returns the current set filters. * * @return {Array} The list of filters. */ getFilters() { return this.filters; } /** * Sets an array of filters and connects them with the audio source. * * @param {Array} [value] - A list of filters. * @return {Audio} A reference to this instance. */ setFilters( value ) { if ( ! value ) value = []; if ( this._connected === true ) { this.disconnect(); this.filters = value.slice(); this.connect(); } else { this.filters = value.slice(); } return this; } /** * Defines the detuning of oscillation in cents. * * @param {number} value - The detuning of oscillation in cents. * @return {Audio} A reference to this instance. */ setDetune( value ) { this.detune = value; if ( this.isPlaying === true && this.source.detune !== undefined ) { this.source.detune.setTargetAtTime( this.detune, this.context.currentTime, 0.01 ); } return this; } /** * Returns the detuning of oscillation in cents. * * @return {number} The detuning of oscillation in cents. */ getDetune() { return this.detune; } /** * Returns the first filter in the list of filters. * * @return {AudioNode|undefined} The first filter in the list of filters. */ getFilter() { return this.getFilters()[ 0 ]; } /** * Applies a single filter node to the audio. * * @param {AudioNode} [filter] - The filter to set. * @return {Audio} A reference to this instance. */ setFilter( filter ) { return this.setFilters( filter ? [ filter ] : [] ); } /** * Sets the playback rate. * * Can only be used with compatible audio sources that allow playback control. * * @param {number} [value] - The playback rate to set. * @return {Audio|undefined} A reference to this instance. */ setPlaybackRate( value ) { if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' ); return; } this.playbackRate = value; if ( this.isPlaying === true ) { this.source.playbackRate.setTargetAtTime( this.playbackRate, this.context.currentTime, 0.01 ); } return this; } /** * Returns the current playback rate. * @return {number} The playback rate. */ getPlaybackRate() { return this.playbackRate; } /** * Automatically called when playback finished. */ onEnded() { this.isPlaying = false; this._progress = 0; } /** * Returns the loop flag. * * Can only be used with compatible audio sources that allow playback control. * * @return {boolean} Whether the audio should loop or not. */ getLoop() { if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' ); return false; } return this.loop; } /** * Sets the loop flag. * * Can only be used with compatible audio sources that allow playback control. * * @param {boolean} value - Whether the audio should loop or not. * @return {Audio|undefined} A reference to this instance. */ setLoop( value ) { if ( this.hasPlaybackControl === false ) { console.warn( 'THREE.Audio: this Audio has no playback control.' 
); return; } this.loop = value; if ( this.isPlaying === true ) { this.source.loop = this.loop; } return this; } /** * Sets the loop start value which defines where in the audio buffer the replay should * start, in seconds. * * @param {number} value - The loop start value. * @return {Audio} A reference to this instance. */ setLoopStart( value ) { this.loopStart = value; return this; } /** * Sets the loop end value which defines where in the audio buffer the replay should * stop, in seconds. * * @param {number} value - The loop end value. * @return {Audio} A reference to this instance. */ setLoopEnd( value ) { this.loopEnd = value; return this; } /** * Returns the volume. * * @return {number} The volume. */ getVolume() { return this.gain.gain.value; } /** * Sets the volume. * * @param {number} value - The volume to set. * @return {Audio} A reference to this instance. */ setVolume( value ) { this.gain.gain.setTargetAtTime( value, this.context.currentTime, 0.01 ); return this; } copy( source, recursive ) { super.copy( source, recursive ); if ( source.sourceType !== 'buffer' ) { console.warn( 'THREE.Audio: Audio source type cannot be copied.' ); return this; } this.autoplay = source.autoplay; this.buffer = source.buffer; this.detune = source.detune; this.loop = source.loop; this.loopStart = source.loopStart; this.loopEnd = source.loopEnd; this.offset = source.offset; this.duration = source.duration; this.playbackRate = source.playbackRate; this.hasPlaybackControl = source.hasPlaybackControl; this.sourceType = source.sourceType; this.filters = source.filters.slice(); return this; } clone( recursive ) { return new this.constructor( this.listener ).copy( this, recursive ); } } const _position = /*@__PURE__*/ new Vector3(); const _quaternion = /*@__PURE__*/ new Quaternion(); const _scale = /*@__PURE__*/ new Vector3(); const _orientation = /*@__PURE__*/ new Vector3(); /** * Represents a positional audio object. * * ```js * // create an AudioListener and add it to the camera * const listener = new THREE.AudioListener(); * camera.add( listener ); * * // create the PositionalAudio object (passing in the listener) * const sound = new THREE.PositionalAudio( listener ); * * // load a sound and set it as the PositionalAudio object's buffer * const audioLoader = new THREE.AudioLoader(); * audioLoader.load( 'sounds/song.ogg', function( buffer ) { * sound.setBuffer( buffer ); * sound.setRefDistance( 20 ); * sound.play(); * }); * * // create an object for the sound to play from * const sphere = new THREE.SphereGeometry( 20, 32, 16 ); * const material = new THREE.MeshPhongMaterial( { color: 0xff2200 } ); * const mesh = new THREE.Mesh( sphere, material ); * scene.add( mesh ); * * // finally add the sound to the mesh * mesh.add( sound ); * ``` * * @augments Audio */ class PositionalAudio extends Audio { /** * Constructs a new positional audio. * * @param {AudioListener} listener - The global audio listener. */ constructor( listener ) { super( listener ); /** * The panner node represents the location, direction, and behavior of an audio * source in 3D space. * * @type {PannerNode} * @readonly */ this.panner = this.context.createPanner(); this.panner.panningModel = 'HRTF'; this.panner.connect( this.gain ); } connect() { super.connect(); this.panner.connect( this.gain ); return this; } disconnect() { super.disconnect(); this.panner.disconnect( this.gain ); return this; } getOutput() { return this.panner; } /** * Returns the current reference distance. * * @return {number} The reference distance.
*/ getRefDistance() { return this.panner.refDistance; } /** * Defines the reference distance for reducing volume as the audio source moves * further from the listener – i.e. the distance at which the volume reduction * starts taking effect. * * @param {number} value - The reference distance to set. * @return {PositionalAudio} A reference to this instance. */ setRefDistance( value ) { this.panner.refDistance = value; return this; } /** * Returns the current rolloff factor. * * @return {number} The rolloff factor. */ getRolloffFactor() { return this.panner.rolloffFactor; } /** * Defines how quickly the volume is reduced as the source moves away from the listener. * * @param {number} value - The rolloff factor. * @return {PositionalAudio} A reference to this instance. */ setRolloffFactor( value ) { this.panner.rolloffFactor = value; return this; } /** * Returns the current distance model. * * @return {('linear'|'inverse'|'exponential')} The distance model. */ getDistanceModel() { return this.panner.distanceModel; } /** * Defines which algorithm to use to reduce the volume of the audio source * as it moves away from the listener. * * Read [the spec]{@link https://www.w3.org/TR/webaudio-1.1/#enumdef-distancemodeltype} * for more details. * * @param {('linear'|'inverse'|'exponential')} value - The distance model to set. * @return {PositionalAudio} A reference to this instance. */ setDistanceModel( value ) { this.panner.distanceModel = value; return this; } /** * Returns the current max distance. * * @return {number} The max distance. */ getMaxDistance() { return this.panner.maxDistance; } /** * Defines the maximum distance between the audio source and the listener, * after which the volume is not reduced any further. * * This value is used only by the `linear` distance model. * * @param {number} value - The max distance. * @return {PositionalAudio} A reference to this instance. */ setMaxDistance( value ) { this.panner.maxDistance = value; return this; } /** * Sets the directional cone in which the audio can be listened. * * @param {number} coneInnerAngle - An angle, in degrees, of a cone inside of which there will be no volume reduction. * @param {number} coneOuterAngle - An angle, in degrees, of a cone outside of which the volume will be reduced by a constant value, defined by the `coneOuterGain` parameter. * @param {number} coneOuterGain - The amount of volume reduction outside the cone defined by the `coneOuterAngle`. When set to `0`, no sound can be heard. * @return {PositionalAudio} A reference to this instance. 
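 *
 * A brief sketch (assuming `sound` is a {@link PositionalAudio}; the values are illustrative only):
 * ```js
 * // full volume inside a 180 degree cone, 10% volume outside a 230 degree cone
 * sound.setDirectionalCone( 180, 230, 0.1 );
 * ```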
*/ setDirectionalCone( coneInnerAngle, coneOuterAngle, coneOuterGain ) { this.panner.coneInnerAngle = coneInnerAngle; this.panner.coneOuterAngle = coneOuterAngle; this.panner.coneOuterGain = coneOuterGain; return this; } updateMatrixWorld( force ) { super.updateMatrixWorld( force ); if ( this.hasPlaybackControl === true && this.isPlaying === false ) return; this.matrixWorld.decompose( _position, _quaternion, _scale ); _orientation.set( 0, 0, 1 ).applyQuaternion( _quaternion ); const panner = this.panner; if ( panner.positionX ) { // code path for Chrome and Firefox (see #14393) const endTime = this.context.currentTime + this.listener.timeDelta; panner.positionX.linearRampToValueAtTime( _position.x, endTime ); panner.positionY.linearRampToValueAtTime( _position.y, endTime ); panner.positionZ.linearRampToValueAtTime( _position.z, endTime ); panner.orientationX.linearRampToValueAtTime( _orientation.x, endTime ); panner.orientationY.linearRampToValueAtTime( _orientation.y, endTime ); panner.orientationZ.linearRampToValueAtTime( _orientation.z, endTime ); } else { panner.setPosition( _position.x, _position.y, _position.z ); panner.setOrientation( _orientation.x, _orientation.y, _orientation.z ); } } } /** * This class can be used to analyse audio data. * * ```js * // create an AudioListener and add it to the camera * const listener = new THREE.AudioListener(); * camera.add( listener ); * * // create an Audio source * const sound = new THREE.Audio( listener ); * * // load a sound and set it as the Audio object's buffer * const audioLoader = new THREE.AudioLoader(); * audioLoader.load( 'sounds/ambient.ogg', function( buffer ) { * sound.setBuffer( buffer ); * sound.setLoop(true); * sound.setVolume(0.5); * sound.play(); * }); * * // create an AudioAnalyser, passing in the sound and desired fftSize * const analyser = new THREE.AudioAnalyser( sound, 32 ); * * // get the average frequency of the sound * const data = analyser.getAverageFrequency(); * ``` */ class AudioAnalyser { /** * Constructs a new audio analyzer. * * @param {Audio} audio - The audio to analyze. * @param {number} [fftSize=2048] - The window size in samples that is used when performing a Fast Fourier Transform (FFT) to get frequency domain data. */ constructor( audio, fftSize = 2048 ) { /** * The `AnalyserNode` used to analyze the audio data. * * @type {AnalyserNode} */ this.analyser = audio.context.createAnalyser(); this.analyser.fftSize = fftSize; /** * Holds the analyzed data. * * @type {Uint8Array} */ this.data = new Uint8Array( this.analyser.frequencyBinCount ); audio.getOutput().connect( this.analyser ); } /** * Returns an array with frequency data of the audio. * * Each item in the array represents the decibel value for a specific frequency. * The frequencies are spread linearly from 0 to 1/2 of the sample rate. * For example, for a 48000 Hz sample rate, the last item of the array will represent * the decibel value for 24000 Hz. * * @return {Uint8Array} The frequency data. */ getFrequencyData() { this.analyser.getByteFrequencyData( this.data ); return this.data; } /** * Returns the average of the frequencies returned by {@link AudioAnalyser#getFrequencyData}. * * @return {number} The average frequency. */ getAverageFrequency() { let value = 0; const data = this.getFrequencyData(); for ( let i = 0; i < data.length; i ++ ) { value += data[ i ]; } return value / data.length; } } /** * Buffered scene graph property that allows weighted accumulation; used internally. */ class PropertyMixer { /** * Constructs a new property mixer.
* * @param {PropertyBinding} binding - The property binding. * @param {string} typeName - The keyframe track type name. * @param {number} valueSize - The keyframe track value size. */ constructor( binding, typeName, valueSize ) { /** * The property binding. * * @type {PropertyBinding} */ this.binding = binding; /** * The keyframe track value size. * * @type {number} */ this.valueSize = valueSize; let mixFunction, mixFunctionAdditive, setIdentity; // buffer layout: [ incoming | accu0 | accu1 | orig | addAccu | (optional work) ] // // interpolators can use .buffer as their .result // the data then goes to 'incoming' // // 'accu0' and 'accu1' are used frame-interleaved for // the cumulative result and are compared to detect // changes // // 'orig' stores the original state of the property // // 'add' is used for additive cumulative results // // 'work' is optional and is only present for quaternion types. It is used // to store intermediate quaternion multiplication results switch ( typeName ) { case 'quaternion': mixFunction = this._slerp; mixFunctionAdditive = this._slerpAdditive; setIdentity = this._setAdditiveIdentityQuaternion; this.buffer = new Float64Array( valueSize * 6 ); this._workIndex = 5; break; case 'string': case 'bool': mixFunction = this._select; // Use the regular mix function and for additive on these types, // additive is not relevant for non-numeric types mixFunctionAdditive = this._select; setIdentity = this._setAdditiveIdentityOther; this.buffer = new Array( valueSize * 5 ); break; default: mixFunction = this._lerp; mixFunctionAdditive = this._lerpAdditive; setIdentity = this._setAdditiveIdentityNumeric; this.buffer = new Float64Array( valueSize * 5 ); } this._mixBufferRegion = mixFunction; this._mixBufferRegionAdditive = mixFunctionAdditive; this._setIdentity = setIdentity; this._origIndex = 3; this._addIndex = 4; /** * TODO * * @type {number} * @default 0 */ this.cumulativeWeight = 0; /** * TODO * * @type {number} * @default 0 */ this.cumulativeWeightAdditive = 0; /** * TODO * * @type {number} * @default 0 */ this.useCount = 0; /** * TODO * * @type {number} * @default 0 */ this.referenceCount = 0; } /** * Accumulates data in the `incoming` region into `accu`. * * @param {number} accuIndex - The accumulation index. * @param {number} weight - The weight. */ accumulate( accuIndex, weight ) { // note: happily accumulating nothing when weight = 0, the caller knows // the weight and shouldn't have made the call in the first place const buffer = this.buffer, stride = this.valueSize, offset = accuIndex * stride + stride; let currentWeight = this.cumulativeWeight; if ( currentWeight === 0 ) { // accuN := incoming * weight for ( let i = 0; i !== stride; ++ i ) { buffer[ offset + i ] = buffer[ i ]; } currentWeight = weight; } else { // accuN := accuN + incoming * weight currentWeight += weight; const mix = weight / currentWeight; this._mixBufferRegion( buffer, offset, 0, mix, stride ); } this.cumulativeWeight = currentWeight; } /** * Accumulates data in the `incoming` region into `add`. * * @param {number} weight - The weight. */ accumulateAdditive( weight ) { const buffer = this.buffer, stride = this.valueSize, offset = stride * this._addIndex; if ( this.cumulativeWeightAdditive === 0 ) { // add = identity this._setIdentity(); } // add := add + incoming * weight this._mixBufferRegionAdditive( buffer, offset, 0, weight, stride ); this.cumulativeWeightAdditive += weight; } /** * Applies the state of `accu` to the binding when accus differ. 
* * @param {number} accuIndex - The accumulation index. */ apply( accuIndex ) { const stride = this.valueSize, buffer = this.buffer, offset = accuIndex * stride + stride, weight = this.cumulativeWeight, weightAdditive = this.cumulativeWeightAdditive, binding = this.binding; this.cumulativeWeight = 0; this.cumulativeWeightAdditive = 0; if ( weight < 1 ) { // accuN := accuN + original * ( 1 - cumulativeWeight ) const originalValueOffset = stride * this._origIndex; this._mixBufferRegion( buffer, offset, originalValueOffset, 1 - weight, stride ); } if ( weightAdditive > 0 ) { // accuN := accuN + additive accuN this._mixBufferRegionAdditive( buffer, offset, this._addIndex * stride, 1, stride ); } for ( let i = stride, e = stride + stride; i !== e; ++ i ) { if ( buffer[ i ] !== buffer[ i + stride ] ) { // value has changed -> update scene graph binding.setValue( buffer, offset ); break; } } } /** * Remembers the state of the bound property and copy it to both accus. */ saveOriginalState() { const binding = this.binding; const buffer = this.buffer, stride = this.valueSize, originalValueOffset = stride * this._origIndex; binding.getValue( buffer, originalValueOffset ); // accu[0..1] := orig -- initially detect changes against the original for ( let i = stride, e = originalValueOffset; i !== e; ++ i ) { buffer[ i ] = buffer[ originalValueOffset + ( i % stride ) ]; } // Add to identity for additive this._setIdentity(); this.cumulativeWeight = 0; this.cumulativeWeightAdditive = 0; } /** * Applies the state previously taken via {@link PropertyMixer#saveOriginalState} to the binding. */ restoreOriginalState() { const originalValueOffset = this.valueSize * 3; this.binding.setValue( this.buffer, originalValueOffset ); } // internals _setAdditiveIdentityNumeric() { const startIndex = this._addIndex * this.valueSize; const endIndex = startIndex + this.valueSize; for ( let i = startIndex; i < endIndex; i ++ ) { this.buffer[ i ] = 0; } } _setAdditiveIdentityQuaternion() { this._setAdditiveIdentityNumeric(); this.buffer[ this._addIndex * this.valueSize + 3 ] = 1; } _setAdditiveIdentityOther() { const startIndex = this._origIndex * this.valueSize; const targetIndex = this._addIndex * this.valueSize; for ( let i = 0; i < this.valueSize; i ++ ) { this.buffer[ targetIndex + i ] = this.buffer[ startIndex + i ]; } } // mix functions _select( buffer, dstOffset, srcOffset, t, stride ) { if ( t >= 0.5 ) { for ( let i = 0; i !== stride; ++ i ) { buffer[ dstOffset + i ] = buffer[ srcOffset + i ]; } } } _slerp( buffer, dstOffset, srcOffset, t ) { Quaternion.slerpFlat( buffer, dstOffset, buffer, dstOffset, buffer, srcOffset, t ); } _slerpAdditive( buffer, dstOffset, srcOffset, t, stride ) { const workOffset = this._workIndex * stride; // Store result in intermediate buffer offset Quaternion.multiplyQuaternionsFlat( buffer, workOffset, buffer, dstOffset, buffer, srcOffset ); // Slerp to the intermediate result Quaternion.slerpFlat( buffer, dstOffset, buffer, dstOffset, buffer, workOffset, t ); } _lerp( buffer, dstOffset, srcOffset, t, stride ) { const s = 1 - t; for ( let i = 0; i !== stride; ++ i ) { const j = dstOffset + i; buffer[ j ] = buffer[ j ] * s + buffer[ srcOffset + i ] * t; } } _lerpAdditive( buffer, dstOffset, srcOffset, t, stride ) { for ( let i = 0; i !== stride; ++ i ) { const j = dstOffset + i; buffer[ j ] = buffer[ j ] + buffer[ srcOffset + i ] * t; } } } // Characters [].:/ are reserved for track binding syntax. 
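// Illustrative examples (not exhaustive) of track names the expressions below are meant
// to accept; see PropertyBinding.parseTrackName() for the full list of supported forms:
//
//   'cube.position'            -> nodeName: 'cube', propertyName: 'position'
//   'cube.material.opacity'    -> nodeName: 'cube', objectName: 'material', propertyName: 'opacity'
//   '.bones[Spine].quaternion' -> objectName: 'bones', objectIndex: 'Spine', propertyName: 'quaternion'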
const _RESERVED_CHARS_RE = '\\[\\]\\.:\\/'; const _reservedRe = new RegExp( '[' + _RESERVED_CHARS_RE + ']', 'g' ); // Attempts to allow node names from any language. ES5's `\w` regexp matches // only latin characters, and the unicode \p{L} is not yet supported. So // instead, we exclude reserved characters and match everything else. const _wordChar = '[^' + _RESERVED_CHARS_RE + ']'; const _wordCharOrDot = '[^' + _RESERVED_CHARS_RE.replace( '\\.', '' ) + ']'; // Parent directories, delimited by '/' or ':'. Currently unused, but must // be matched to parse the rest of the track name. const _directoryRe = /*@__PURE__*/ /((?:WC+[\/:])*)/.source.replace( 'WC', _wordChar ); // Target node. May contain word characters (a-zA-Z0-9_) and '.' or '-'. const _nodeRe = /*@__PURE__*/ /(WCOD+)?/.source.replace( 'WCOD', _wordCharOrDot ); // Object on target node, and accessor. May not contain reserved // characters. Accessor may contain any character except closing bracket. const _objectRe = /*@__PURE__*/ /(?:\.(WC+)(?:\[(.+)\])?)?/.source.replace( 'WC', _wordChar ); // Property and accessor. May not contain reserved characters. Accessor may // contain any non-bracket characters. const _propertyRe = /*@__PURE__*/ /\.(WC+)(?:\[(.+)\])?/.source.replace( 'WC', _wordChar ); const _trackRe = new RegExp( '' + '^' + _directoryRe + _nodeRe + _objectRe + _propertyRe + '$' ); const _supportedObjectNames = [ 'material', 'materials', 'bones', 'map' ]; class Composite { constructor( targetGroup, path, optionalParsedPath ) { const parsedPath = optionalParsedPath || PropertyBinding.parseTrackName( path ); this._targetGroup = targetGroup; this._bindings = targetGroup.subscribe_( path, parsedPath ); } getValue( array, offset ) { this.bind(); // bind all binding const firstValidIndex = this._targetGroup.nCachedObjects_, binding = this._bindings[ firstValidIndex ]; // and only call .getValue on the first if ( binding !== undefined ) binding.getValue( array, offset ); } setValue( array, offset ) { const bindings = this._bindings; for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) { bindings[ i ].setValue( array, offset ); } } bind() { const bindings = this._bindings; for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) { bindings[ i ].bind(); } } unbind() { const bindings = this._bindings; for ( let i = this._targetGroup.nCachedObjects_, n = bindings.length; i !== n; ++ i ) { bindings[ i ].unbind(); } } } // Note: This class uses a State pattern on a per-method basis: // 'bind' sets 'this.getValue' / 'setValue' and shadows the // prototype version of these methods with one that represents // the bound state. When the property is not found, the methods // become no-ops. /** * This holds a reference to a real property in the scene graph; used internally. */ class PropertyBinding { /** * Constructs a new property binding. * * @param {Object} rootNode - The root node. * @param {string} path - The path. * @param {?Object} [parsedPath] - The parsed path. */ constructor( rootNode, path, parsedPath ) { /** * The object path to the animated property. * * @type {string} */ this.path = path; /** * An object holding information about the path. * * @type {Object} */ this.parsedPath = parsedPath || PropertyBinding.parseTrackName( path ); /** * The object owns the animated property. * * @type {?Object} */ this.node = PropertyBinding.findNode( rootNode, this.parsedPath.nodeName ); /** * The root node. 
* * @type {Object3D|Skeleton} */ this.rootNode = rootNode; // initial state of these methods that calls 'bind' this.getValue = this._getValue_unbound; this.setValue = this._setValue_unbound; } /** * Factory method for creating a property binding from the given parameters. * * @static * @param {Object} root - The root node. * @param {string} path - The path. * @param {?Object} [parsedPath] - The parsed path. * @return {PropertyBinding|Composite} The created property binding or composite. */ static create( root, path, parsedPath ) { if ( ! ( root && root.isAnimationObjectGroup ) ) { return new PropertyBinding( root, path, parsedPath ); } else { return new PropertyBinding.Composite( root, path, parsedPath ); } } /** * Replaces spaces with underscores and removes unsupported characters from * node names, to ensure compatibility with parseTrackName(). * * @param {string} name - Node name to be sanitized. * @return {string} The sanitized node name. */ static sanitizeNodeName( name ) { return name.replace( /\s/g, '_' ).replace( _reservedRe, '' ); } /** * Parses the given track name (an object path to an animated property) and * returns an object with information about the path. Matches strings in the following forms: * * - nodeName.property * - nodeName.property[accessor] * - nodeName.material.property[accessor] * - uuid.property[accessor] * - uuid.objectName[objectIndex].propertyName[propertyIndex] * - parentName/nodeName.property * - parentName/parentName/nodeName.property[index] * - .bone[Armature.DEF_cog].position * - scene:helium_balloon_model:helium_balloon_model.position * * @static * @param {string} trackName - The track name to parse. * @return {Object} The parsed track name as an object. */ static parseTrackName( trackName ) { const matches = _trackRe.exec( trackName ); if ( matches === null ) { throw new Error( 'PropertyBinding: Cannot parse trackName: ' + trackName ); } const results = { // directoryName: matches[ 1 ], // (tschw) currently unused nodeName: matches[ 2 ], objectName: matches[ 3 ], objectIndex: matches[ 4 ], propertyName: matches[ 5 ], // required propertyIndex: matches[ 6 ] }; const lastDot = results.nodeName && results.nodeName.lastIndexOf( '.' ); if ( lastDot !== undefined && lastDot !== -1 ) { const objectName = results.nodeName.substring( lastDot + 1 ); // Object names must be checked against an allowlist. Otherwise, there // is no way to parse 'foo.bar.baz': 'baz' must be a property, but // 'bar' could be the objectName, or part of a nodeName (which can // include '.' characters). if ( _supportedObjectNames.indexOf( objectName ) !== -1 ) { results.nodeName = results.nodeName.substring( 0, lastDot ); results.objectName = objectName; } } if ( results.propertyName === null || results.propertyName.length === 0 ) { throw new Error( 'PropertyBinding: can not parse propertyName from trackName: ' + trackName ); } return results; } /** * Searches for a node in the hierarchy of the given root object by the given * node name. * * @static * @param {Object} root - The root object. * @param {string|number} nodeName - The name of the node. * @return {?Object} The found node. Returns `null` if no object was found. */ static findNode( root, nodeName ) { if ( nodeName === undefined || nodeName === '' || nodeName === '.' || nodeName === -1 || nodeName === root.name || nodeName === root.uuid ) { return root; } // search into skeleton bones. 
if ( root.skeleton ) { const bone = root.skeleton.getBoneByName( nodeName ); if ( bone !== undefined ) { return bone; } } // search into node subtree. if ( root.children ) { const searchNodeSubtree = function ( children ) { for ( let i = 0; i < children.length; i ++ ) { const childNode = children[ i ]; if ( childNode.name === nodeName || childNode.uuid === nodeName ) { return childNode; } const result = searchNodeSubtree( childNode.children ); if ( result ) return result; } return null; }; const subTreeNode = searchNodeSubtree( root.children ); if ( subTreeNode ) { return subTreeNode; } } return null; } // these are used to "bind" a nonexistent property _getValue_unavailable() {} _setValue_unavailable() {} // Getters _getValue_direct( buffer, offset ) { buffer[ offset ] = this.targetObject[ this.propertyName ]; } _getValue_array( buffer, offset ) { const source = this.resolvedProperty; for ( let i = 0, n = source.length; i !== n; ++ i ) { buffer[ offset ++ ] = source[ i ]; } } _getValue_arrayElement( buffer, offset ) { buffer[ offset ] = this.resolvedProperty[ this.propertyIndex ]; } _getValue_toArray( buffer, offset ) { this.resolvedProperty.toArray( buffer, offset ); } // Direct _setValue_direct( buffer, offset ) { this.targetObject[ this.propertyName ] = buffer[ offset ]; } _setValue_direct_setNeedsUpdate( buffer, offset ) { this.targetObject[ this.propertyName ] = buffer[ offset ]; this.targetObject.needsUpdate = true; } _setValue_direct_setMatrixWorldNeedsUpdate( buffer, offset ) { this.targetObject[ this.propertyName ] = buffer[ offset ]; this.targetObject.matrixWorldNeedsUpdate = true; } // EntireArray _setValue_array( buffer, offset ) { const dest = this.resolvedProperty; for ( let i = 0, n = dest.length; i !== n; ++ i ) { dest[ i ] = buffer[ offset ++ ]; } } _setValue_array_setNeedsUpdate( buffer, offset ) { const dest = this.resolvedProperty; for ( let i = 0, n = dest.length; i !== n; ++ i ) { dest[ i ] = buffer[ offset ++ ]; } this.targetObject.needsUpdate = true; } _setValue_array_setMatrixWorldNeedsUpdate( buffer, offset ) { const dest = this.resolvedProperty; for ( let i = 0, n = dest.length; i !== n; ++ i ) { dest[ i ] = buffer[ offset ++ ]; } this.targetObject.matrixWorldNeedsUpdate = true; } // ArrayElement _setValue_arrayElement( buffer, offset ) { this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ]; } _setValue_arrayElement_setNeedsUpdate( buffer, offset ) { this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ]; this.targetObject.needsUpdate = true; } _setValue_arrayElement_setMatrixWorldNeedsUpdate( buffer, offset ) { this.resolvedProperty[ this.propertyIndex ] = buffer[ offset ]; this.targetObject.matrixWorldNeedsUpdate = true; } // HasToFromArray _setValue_fromArray( buffer, offset ) { this.resolvedProperty.fromArray( buffer, offset ); } _setValue_fromArray_setNeedsUpdate( buffer, offset ) { this.resolvedProperty.fromArray( buffer, offset ); this.targetObject.needsUpdate = true; } _setValue_fromArray_setMatrixWorldNeedsUpdate( buffer, offset ) { this.resolvedProperty.fromArray( buffer, offset ); this.targetObject.matrixWorldNeedsUpdate = true; } _getValue_unbound( targetArray, offset ) { this.bind(); this.getValue( targetArray, offset ); } _setValue_unbound( sourceArray, offset ) { this.bind(); this.setValue( sourceArray, offset ); } /** * Creates a getter / setter pair for the property tracked by this binding. 
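 *
 * A minimal sketch of a binding in practice (assuming `mesh` is an {@link Object3D} named 'cube'):
 * ```js
 * const binding = THREE.PropertyBinding.create( mesh, 'cube.position' );
 *
 * const values = [ 0, 0, 0 ];
 * binding.getValue( values, 0 ); // binds lazily, then copies mesh.position into values
 *
 * values[ 1 ] = 5;
 * binding.setValue( values, 0 ); // writes values back into mesh.position
 * ```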
*/ bind() { let targetObject = this.node; const parsedPath = this.parsedPath; const objectName = parsedPath.objectName; const propertyName = parsedPath.propertyName; let propertyIndex = parsedPath.propertyIndex; if ( ! targetObject ) { targetObject = PropertyBinding.findNode( this.rootNode, parsedPath.nodeName ); this.node = targetObject; } // set fail state so we can just 'return' on error this.getValue = this._getValue_unavailable; this.setValue = this._setValue_unavailable; // ensure there is a value node if ( ! targetObject ) { console.warn( 'THREE.PropertyBinding: No target node found for track: ' + this.path + '.' ); return; } if ( objectName ) { let objectIndex = parsedPath.objectIndex; // special cases were we need to reach deeper into the hierarchy to get the face materials.... switch ( objectName ) { case 'materials': if ( ! targetObject.material ) { console.error( 'THREE.PropertyBinding: Can not bind to material as node does not have a material.', this ); return; } if ( ! targetObject.material.materials ) { console.error( 'THREE.PropertyBinding: Can not bind to material.materials as node.material does not have a materials array.', this ); return; } targetObject = targetObject.material.materials; break; case 'bones': if ( ! targetObject.skeleton ) { console.error( 'THREE.PropertyBinding: Can not bind to bones as node does not have a skeleton.', this ); return; } // potential future optimization: skip this if propertyIndex is already an integer // and convert the integer string to a true integer. targetObject = targetObject.skeleton.bones; // support resolving morphTarget names into indices. for ( let i = 0; i < targetObject.length; i ++ ) { if ( targetObject[ i ].name === objectIndex ) { objectIndex = i; break; } } break; case 'map': if ( 'map' in targetObject ) { targetObject = targetObject.map; break; } if ( ! targetObject.material ) { console.error( 'THREE.PropertyBinding: Can not bind to material as node does not have a material.', this ); return; } if ( ! targetObject.material.map ) { console.error( 'THREE.PropertyBinding: Can not bind to material.map as node.material does not have a map.', this ); return; } targetObject = targetObject.material.map; break; default: if ( targetObject[ objectName ] === undefined ) { console.error( 'THREE.PropertyBinding: Can not bind to objectName of node undefined.', this ); return; } targetObject = targetObject[ objectName ]; } if ( objectIndex !== undefined ) { if ( targetObject[ objectIndex ] === undefined ) { console.error( 'THREE.PropertyBinding: Trying to bind to objectIndex of objectName, but is undefined.', this, targetObject ); return; } targetObject = targetObject[ objectIndex ]; } } // resolve property const nodeProperty = targetObject[ propertyName ]; if ( nodeProperty === undefined ) { const nodeName = parsedPath.nodeName; console.error( 'THREE.PropertyBinding: Trying to update property for track: ' + nodeName + '.' 
+ propertyName + ' but it wasn\'t found.', targetObject ); return; } // determine versioning scheme let versioning = this.Versioning.None; this.targetObject = targetObject; if ( targetObject.isMaterial === true ) { versioning = this.Versioning.NeedsUpdate; } else if ( targetObject.isObject3D === true ) { versioning = this.Versioning.MatrixWorldNeedsUpdate; } // determine how the property gets bound let bindingType = this.BindingType.Direct; if ( propertyIndex !== undefined ) { // access a sub element of the property array (only primitives are supported right now) if ( propertyName === 'morphTargetInfluences' ) { // potential optimization, skip this if propertyIndex is already an integer, and convert the integer string to a true integer. // support resolving morphTarget names into indices. if ( ! targetObject.geometry ) { console.error( 'THREE.PropertyBinding: Can not bind to morphTargetInfluences because node does not have a geometry.', this ); return; } if ( ! targetObject.geometry.morphAttributes ) { console.error( 'THREE.PropertyBinding: Can not bind to morphTargetInfluences because node does not have a geometry.morphAttributes.', this ); return; } if ( targetObject.morphTargetDictionary[ propertyIndex ] !== undefined ) { propertyIndex = targetObject.morphTargetDictionary[ propertyIndex ]; } } bindingType = this.BindingType.ArrayElement; this.resolvedProperty = nodeProperty; this.propertyIndex = propertyIndex; } else if ( nodeProperty.fromArray !== undefined && nodeProperty.toArray !== undefined ) { // must use copy for Object3D.Euler/Quaternion bindingType = this.BindingType.HasFromToArray; this.resolvedProperty = nodeProperty; } else if ( Array.isArray( nodeProperty ) ) { bindingType = this.BindingType.EntireArray; this.resolvedProperty = nodeProperty; } else { this.propertyName = propertyName; } // select getter / setter this.getValue = this.GetterByBindingType[ bindingType ]; this.setValue = this.SetterByBindingTypeAndVersioning[ bindingType ][ versioning ]; } /** * Unbinds the property. 
*/ unbind() { this.node = null; // back to the prototype version of getValue / setValue // note: avoiding to mutate the shape of 'this' via 'delete' this.getValue = this._getValue_unbound; this.setValue = this._setValue_unbound; } } PropertyBinding.Composite = Composite; PropertyBinding.prototype.BindingType = { Direct: 0, EntireArray: 1, ArrayElement: 2, HasFromToArray: 3 }; PropertyBinding.prototype.Versioning = { None: 0, NeedsUpdate: 1, MatrixWorldNeedsUpdate: 2 }; PropertyBinding.prototype.GetterByBindingType = [ PropertyBinding.prototype._getValue_direct, PropertyBinding.prototype._getValue_array, PropertyBinding.prototype._getValue_arrayElement, PropertyBinding.prototype._getValue_toArray, ]; PropertyBinding.prototype.SetterByBindingTypeAndVersioning = [ [ // Direct PropertyBinding.prototype._setValue_direct, PropertyBinding.prototype._setValue_direct_setNeedsUpdate, PropertyBinding.prototype._setValue_direct_setMatrixWorldNeedsUpdate, ], [ // EntireArray PropertyBinding.prototype._setValue_array, PropertyBinding.prototype._setValue_array_setNeedsUpdate, PropertyBinding.prototype._setValue_array_setMatrixWorldNeedsUpdate, ], [ // ArrayElement PropertyBinding.prototype._setValue_arrayElement, PropertyBinding.prototype._setValue_arrayElement_setNeedsUpdate, PropertyBinding.prototype._setValue_arrayElement_setMatrixWorldNeedsUpdate, ], [ // HasToFromArray PropertyBinding.prototype._setValue_fromArray, PropertyBinding.prototype._setValue_fromArray_setNeedsUpdate, PropertyBinding.prototype._setValue_fromArray_setMatrixWorldNeedsUpdate, ] ]; /** * A group of objects that receives a shared animation state. * * Usage: * * - Add objects you would otherwise pass as 'root' to the * constructor or the .clipAction method of AnimationMixer. * - Instead pass this object as 'root'. * - You can also add and remove objects later when the mixer is running. * * Note: * * - Objects of this class appear as one object to the mixer, * so cache control of the individual objects must be done on the group. * * Limitation: * * - The animated properties must be compatible among the all objects in the group. * - A single property can either be controlled through a target group or directly, but not both. */ class AnimationObjectGroup { /** * Constructs a new animation group. * * @param {...Object3D} arguments - An arbitrary number of 3D objects that share the same animation state. */ constructor() { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isAnimationObjectGroup = true; /** * The UUID of the 3D object. * * @type {string} * @readonly */ this.uuid = generateUUID(); // cached objects followed by the active ones this._objects = Array.prototype.slice.call( arguments ); this.nCachedObjects_ = 0; // threshold // note: read by PropertyBinding.Composite const indices = {}; this._indicesByUUID = indices; // for bookkeeping for ( let i = 0, n = arguments.length; i !== n; ++ i ) { indices[ arguments[ i ].uuid ] = i; } this._paths = []; // inside: string this._parsedPaths = []; // inside: { we don't care, here } this._bindings = []; // inside: Array< PropertyBinding > this._bindingsIndicesByPath = {}; // inside: indices in these arrays const scope = this; this.stats = { objects: { get total() { return scope._objects.length; }, get inUse() { return this.total - scope.nCachedObjects_; } }, get bindingsPerObject() { return scope._bindings.length; } }; } /** * Adds an arbitrary number of objects to this animation group. 
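 *
 * A brief sketch of sharing one animation state across several objects (assumed names; `meshA`, `meshB`, `meshC` and `clip` are not part of this class):
 * ```js
 * const group = new THREE.AnimationObjectGroup( meshA, meshB );
 *
 * const mixer = new THREE.AnimationMixer( group );
 * mixer.clipAction( clip ).play();
 *
 * // objects can join (or leave) the group while the mixer is running
 * group.add( meshC );
 * ```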
* * @param {...Object3D} arguments - The 3D objects to add. */ add() { const objects = this._objects, indicesByUUID = this._indicesByUUID, paths = this._paths, parsedPaths = this._parsedPaths, bindings = this._bindings, nBindings = bindings.length; let knownObject = undefined, nObjects = objects.length, nCachedObjects = this.nCachedObjects_; for ( let i = 0, n = arguments.length; i !== n; ++ i ) { const object = arguments[ i ], uuid = object.uuid; let index = indicesByUUID[ uuid ]; if ( index === undefined ) { // unknown object -> add it to the ACTIVE region index = nObjects ++; indicesByUUID[ uuid ] = index; objects.push( object ); // accounting is done, now do the same for all bindings for ( let j = 0, m = nBindings; j !== m; ++ j ) { bindings[ j ].push( new PropertyBinding( object, paths[ j ], parsedPaths[ j ] ) ); } } else if ( index < nCachedObjects ) { knownObject = objects[ index ]; // move existing object to the ACTIVE region const firstActiveIndex = -- nCachedObjects, lastCachedObject = objects[ firstActiveIndex ]; indicesByUUID[ lastCachedObject.uuid ] = index; objects[ index ] = lastCachedObject; indicesByUUID[ uuid ] = firstActiveIndex; objects[ firstActiveIndex ] = object; // accounting is done, now do the same for all bindings for ( let j = 0, m = nBindings; j !== m; ++ j ) { const bindingsForPath = bindings[ j ], lastCached = bindingsForPath[ firstActiveIndex ]; let binding = bindingsForPath[ index ]; bindingsForPath[ index ] = lastCached; if ( binding === undefined ) { // since we do not bother to create new bindings // for objects that are cached, the binding may // or may not exist binding = new PropertyBinding( object, paths[ j ], parsedPaths[ j ] ); } bindingsForPath[ firstActiveIndex ] = binding; } } else if ( objects[ index ] !== knownObject ) { console.error( 'THREE.AnimationObjectGroup: Different objects with the same UUID ' + 'detected. Clean the caches or recreate your infrastructure when reloading scenes.' ); } // else the object is already where we want it to be } // for arguments this.nCachedObjects_ = nCachedObjects; } /** * Removes an arbitrary number of objects to this animation group * * @param {...Object3D} arguments - The 3D objects to remove. */ remove() { const objects = this._objects, indicesByUUID = this._indicesByUUID, bindings = this._bindings, nBindings = bindings.length; let nCachedObjects = this.nCachedObjects_; for ( let i = 0, n = arguments.length; i !== n; ++ i ) { const object = arguments[ i ], uuid = object.uuid, index = indicesByUUID[ uuid ]; if ( index !== undefined && index >= nCachedObjects ) { // move existing object into the CACHED region const lastCachedIndex = nCachedObjects ++, firstActiveObject = objects[ lastCachedIndex ]; indicesByUUID[ firstActiveObject.uuid ] = index; objects[ index ] = firstActiveObject; indicesByUUID[ uuid ] = lastCachedIndex; objects[ lastCachedIndex ] = object; // accounting is done, now do the same for all bindings for ( let j = 0, m = nBindings; j !== m; ++ j ) { const bindingsForPath = bindings[ j ], firstActive = bindingsForPath[ lastCachedIndex ], binding = bindingsForPath[ index ]; bindingsForPath[ index ] = firstActive; bindingsForPath[ lastCachedIndex ] = binding; } } } // for arguments this.nCachedObjects_ = nCachedObjects; } /** * Deallocates all memory resources for the passed 3D objects of this animation group. * * @param {...Object3D} arguments - The 3D objects to uncache. 
*/ uncache() { const objects = this._objects, indicesByUUID = this._indicesByUUID, bindings = this._bindings, nBindings = bindings.length; let nCachedObjects = this.nCachedObjects_, nObjects = objects.length; for ( let i = 0, n = arguments.length; i !== n; ++ i ) { const object = arguments[ i ], uuid = object.uuid, index = indicesByUUID[ uuid ]; if ( index !== undefined ) { delete indicesByUUID[ uuid ]; if ( index < nCachedObjects ) { // object is cached, shrink the CACHED region const firstActiveIndex = -- nCachedObjects, lastCachedObject = objects[ firstActiveIndex ], lastIndex = -- nObjects, lastObject = objects[ lastIndex ]; // last cached object takes this object's place indicesByUUID[ lastCachedObject.uuid ] = index; objects[ index ] = lastCachedObject; // last object goes to the activated slot and pop indicesByUUID[ lastObject.uuid ] = firstActiveIndex; objects[ firstActiveIndex ] = lastObject; objects.pop(); // accounting is done, now do the same for all bindings for ( let j = 0, m = nBindings; j !== m; ++ j ) { const bindingsForPath = bindings[ j ], lastCached = bindingsForPath[ firstActiveIndex ], last = bindingsForPath[ lastIndex ]; bindingsForPath[ index ] = lastCached; bindingsForPath[ firstActiveIndex ] = last; bindingsForPath.pop(); } } else { // object is active, just swap with the last and pop const lastIndex = -- nObjects, lastObject = objects[ lastIndex ]; if ( lastIndex > 0 ) { indicesByUUID[ lastObject.uuid ] = index; } objects[ index ] = lastObject; objects.pop(); // accounting is done, now do the same for all bindings for ( let j = 0, m = nBindings; j !== m; ++ j ) { const bindingsForPath = bindings[ j ]; bindingsForPath[ index ] = bindingsForPath[ lastIndex ]; bindingsForPath.pop(); } } // cached or active } // if object is known } // for arguments this.nCachedObjects_ = nCachedObjects; } // Internal interface used by befriended PropertyBinding.Composite: subscribe_( path, parsedPath ) { // returns an array of bindings for the given path that is changed // according to the contained objects in the group const indicesByPath = this._bindingsIndicesByPath; let index = indicesByPath[ path ]; const bindings = this._bindings; if ( index !== undefined ) return bindings[ index ]; const paths = this._paths, parsedPaths = this._parsedPaths, objects = this._objects, nObjects = objects.length, nCachedObjects = this.nCachedObjects_, bindingsForPath = new Array( nObjects ); index = bindings.length; indicesByPath[ path ] = index; paths.push( path ); parsedPaths.push( parsedPath ); bindings.push( bindingsForPath ); for ( let i = nCachedObjects, n = objects.length; i !== n; ++ i ) { const object = objects[ i ]; bindingsForPath[ i ] = new PropertyBinding( object, path, parsedPath ); } return bindingsForPath; } unsubscribe_( path ) { // tells the group to forget about a property path and no longer // update the array previously obtained with 'subscribe_' const indicesByPath = this._bindingsIndicesByPath, index = indicesByPath[ path ]; if ( index !== undefined ) { const paths = this._paths, parsedPaths = this._parsedPaths, bindings = this._bindings, lastBindingsIndex = bindings.length - 1, lastBindings = bindings[ lastBindingsIndex ], lastBindingsPath = path[ lastBindingsIndex ]; indicesByPath[ lastBindingsPath ] = index; bindings[ index ] = lastBindings; bindings.pop(); parsedPaths[ index ] = parsedPaths[ lastBindingsIndex ]; parsedPaths.pop(); paths[ index ] = paths[ lastBindingsIndex ]; paths.pop(); } } } /** * An instance of `AnimationAction` schedules the playback of an animation 
which is * stored in {@link AnimationClip}. */ class AnimationAction { /** * Constructs a new animation action. * * @param {AnimationMixer} mixer - The mixer that is controlled by this action. * @param {AnimationClip} clip - The animation clip that holds the actual keyframes. * @param {?Object3D} [localRoot=null] - The root object on which this action is performed. * @param {(NormalAnimationBlendMode|AdditiveAnimationBlendMode)} [blendMode] - The blend mode. */ constructor( mixer, clip, localRoot = null, blendMode = clip.blendMode ) { this._mixer = mixer; this._clip = clip; this._localRoot = localRoot; /** * Defines how the animation is blended/combined when two or more animations * are simultaneously played. * * @type {(NormalAnimationBlendMode|AdditiveAnimationBlendMode)} */ this.blendMode = blendMode; const tracks = clip.tracks, nTracks = tracks.length, interpolants = new Array( nTracks ); const interpolantSettings = { endingStart: ZeroCurvatureEnding, endingEnd: ZeroCurvatureEnding }; for ( let i = 0; i !== nTracks; ++ i ) { const interpolant = tracks[ i ].createInterpolant( null ); interpolants[ i ] = interpolant; interpolant.settings = interpolantSettings; } this._interpolantSettings = interpolantSettings; this._interpolants = interpolants; // bound by the mixer // inside: PropertyMixer (managed by the mixer) this._propertyBindings = new Array( nTracks ); this._cacheIndex = null; // for the memory manager this._byClipCacheIndex = null; // for the memory manager this._timeScaleInterpolant = null; this._weightInterpolant = null; /** * The loop mode, set via {@link AnimationAction#setLoop}. * * @type {(LoopRepeat|LoopOnce|LoopPingPong)} * @default LoopRepeat */ this.loop = LoopRepeat; this._loopCount = -1; // global mixer time when the action is to be started // it's set back to 'null' upon start of the action this._startTime = null; /** * The local time of this action (in seconds, starting with `0`). * * The value gets clamped or wrapped to `[0,clip.duration]` (according to the * loop state). * * @type {number} * @default Infinity */ this.time = 0; /** * Scaling factor for the {@link AnimationAction#time}. A value of `0` causes the * animation to pause. Negative values cause the animation to play backwards. * * @type {number} * @default 1 */ this.timeScale = 1; this._effectiveTimeScale = 1; /** * The degree of influence of this action (in the interval `[0, 1]`). Values * between `0` (no impact) and `1` (full impact) can be used to blend between * several actions. * * @type {number} * @default 1 */ this.weight = 1; this._effectiveWeight = 1; /** * The number of repetitions of the performed clip over the course of this action. * Can be set via {@link AnimationAction#setLoop}. * * Setting this number has no effect if {@link AnimationAction#loop} is set to * `THREE:LoopOnce`. * * @type {number} * @default Infinity */ this.repetitions = Infinity; /** * If set to `true`, the playback of the action is paused. * * @type {boolean} * @default false */ this.paused = false; /** * If set to `false`, the action is disabled so it has no impact. * * When the action is re-enabled, the animation continues from its current * time (setting `enabled` to `false` doesn't reset the action). * * @type {boolean} * @default true */ this.enabled = true; /** * If set to true the animation will automatically be paused on its last frame. 
* * If set to false, {@link AnimationAction#enabled} will automatically be switched * to `false` when the last loop of the action has finished, so that this action has * no further impact. * * Note: This member has no impact if the action is interrupted (it * has only an effect if its last loop has really finished). * * @type {boolean} * @default false */ this.clampWhenFinished = false; /** * Enables smooth interpolation without separate clips for start, loop and end. * * @type {boolean} * @default true */ this.zeroSlopeAtStart = true; /** * Enables smooth interpolation without separate clips for start, loop and end. * * @type {boolean} * @default true */ this.zeroSlopeAtEnd = true; } /** * Starts the playback of the animation. * * @return {AnimationAction} A reference to this animation action. */ play() { this._mixer._activateAction( this ); return this; } /** * Stops the playback of the animation. * * @return {AnimationAction} A reference to this animation action. */ stop() { this._mixer._deactivateAction( this ); return this.reset(); } /** * Resets the playback of the animation. * * @return {AnimationAction} A reference to this animation action. */ reset() { this.paused = false; this.enabled = true; this.time = 0; // restart clip this._loopCount = -1;// forget previous loops this._startTime = null;// forget scheduling return this.stopFading().stopWarping(); } /** * Returns `true` if the animation is running. * * @return {boolean} Whether the animation is running or not. */ isRunning() { return this.enabled && ! this.paused && this.timeScale !== 0 && this._startTime === null && this._mixer._isActiveAction( this ); } /** * Returns `true` when {@link AnimationAction#play} has been called. * * @return {boolean} Whether the animation is scheduled or not. */ isScheduled() { return this._mixer._isActiveAction( this ); } /** * Defines the time when the animation should start. * * @param {number} time - The start time in seconds. * @return {AnimationAction} A reference to this animation action. */ startAt( time ) { this._startTime = time; return this; } /** * Configures the loop settings for this action. * * @param {(LoopRepeat|LoopOnce|LoopPingPong)} mode - The loop mode. * @param {number} repetitions - The number of repetitions. * @return {AnimationAction} A reference to this animation action. */ setLoop( mode, repetitions ) { this.loop = mode; this.repetitions = repetitions; return this; } /** * Sets the effective weight of this action. * * An action has no effect and thus an effective weight of zero when the * action is disabled. * * @param {number} weight - The weight to set. * @return {AnimationAction} A reference to this animation action. */ setEffectiveWeight( weight ) { this.weight = weight; // note: same logic as when updated at runtime this._effectiveWeight = this.enabled ? weight : 0; return this.stopFading(); } /** * Returns the effective weight of this action. * * @return {number} The effective weight. */ getEffectiveWeight() { return this._effectiveWeight; } /** * Fades the animation in by increasing its weight gradually from `0` to `1`, * within the passed time interval. * * @param {number} duration - The duration of the fade. * @return {AnimationAction} A reference to this animation action. */ fadeIn( duration ) { return this._scheduleFading( duration, 0, 1 ); } /** * Fades the animation out by decreasing its weight gradually from `1` to `0`, * within the passed time interval. * * @param {number} duration - The duration of the fade. 
* @return {AnimationAction} A reference to this animation action. */ fadeOut( duration ) { return this._scheduleFading( duration, 1, 0 ); } /** * Causes this action to fade in and the given action to fade out, * within the passed time interval. * * @param {AnimationAction} fadeOutAction - The animation action to fade out. * @param {number} duration - The duration of the fade. * @param {boolean} [warp=false] - Whether warping should be used or not. * @return {AnimationAction} A reference to this animation action. */ crossFadeFrom( fadeOutAction, duration, warp = false ) { fadeOutAction.fadeOut( duration ); this.fadeIn( duration ); if ( warp === true ) { const fadeInDuration = this._clip.duration, fadeOutDuration = fadeOutAction._clip.duration, startEndRatio = fadeOutDuration / fadeInDuration, endStartRatio = fadeInDuration / fadeOutDuration; fadeOutAction.warp( 1.0, startEndRatio, duration ); this.warp( endStartRatio, 1.0, duration ); } return this; } /** * Causes this action to fade out and the given action to fade in, * within the passed time interval. * * @param {AnimationAction} fadeInAction - The animation action to fade in. * @param {number} duration - The duration of the fade. * @param {boolean} [warp=false] - Whether warping should be used or not. * @return {AnimationAction} A reference to this animation action. */ crossFadeTo( fadeInAction, duration, warp = false ) { return fadeInAction.crossFadeFrom( this, duration, warp ); } /** * Stops any fading which is applied to this action. * * @return {AnimationAction} A reference to this animation action. */ stopFading() { const weightInterpolant = this._weightInterpolant; if ( weightInterpolant !== null ) { this._weightInterpolant = null; this._mixer._takeBackControlInterpolant( weightInterpolant ); } return this; } /** * Sets the effective time scale of this action. * * An action has no effect and thus an effective time scale of zero when the * action is paused. * * @param {number} timeScale - The time scale to set. * @return {AnimationAction} A reference to this animation action. */ setEffectiveTimeScale( timeScale ) { this.timeScale = timeScale; this._effectiveTimeScale = this.paused ? 0 : timeScale; return this.stopWarping(); } /** * Returns the effective time scale of this action. * * @return {number} The effective time scale. */ getEffectiveTimeScale() { return this._effectiveTimeScale; } /** * Sets the duration for a single loop of this action. * * @param {number} duration - The duration to set. * @return {AnimationAction} A reference to this animation action. */ setDuration( duration ) { this.timeScale = this._clip.duration / duration; return this.stopWarping(); } /** * Synchronizes this action with the passed other action. * * @param {AnimationAction} action - The action to sync with. * @return {AnimationAction} A reference to this animation action. */ syncWith( action ) { this.time = action.time; this.timeScale = action.timeScale; return this.stopWarping(); } /** * Decelerates this animation's speed to `0` within the passed time interval. * * @param {number} duration - The duration. * @return {AnimationAction} A reference to this animation action. */ halt( duration ) { return this.warp( this._effectiveTimeScale, 0, duration ); } /** * Changes the playback speed, within the passed time interval, by modifying * {@link AnimationAction#timeScale} gradually from `startTimeScale` to * `endTimeScale`. * * @param {number} startTimeScale - The start time scale. * @param {number} endTimeScale - The end time scale. 
* @param {number} duration - The duration. * @return {AnimationAction} A reference to this animation action. */ warp( startTimeScale, endTimeScale, duration ) { const mixer = this._mixer, now = mixer.time, timeScale = this.timeScale; let interpolant = this._timeScaleInterpolant; if ( interpolant === null ) { interpolant = mixer._lendControlInterpolant(); this._timeScaleInterpolant = interpolant; } const times = interpolant.parameterPositions, values = interpolant.sampleValues; times[ 0 ] = now; times[ 1 ] = now + duration; values[ 0 ] = startTimeScale / timeScale; values[ 1 ] = endTimeScale / timeScale; return this; } /** * Stops any scheduled warping which is applied to this action. * * @return {AnimationAction} A reference to this animation action. */ stopWarping() { const timeScaleInterpolant = this._timeScaleInterpolant; if ( timeScaleInterpolant !== null ) { this._timeScaleInterpolant = null; this._mixer._takeBackControlInterpolant( timeScaleInterpolant ); } return this; } /** * Returns the animation mixer of this animation action. * * @return {AnimationMixer} The animation mixer. */ getMixer() { return this._mixer; } /** * Returns the animation clip of this animation action. * * @return {AnimationClip} The animation clip. */ getClip() { return this._clip; } /** * Returns the root object of this animation action. * * @return {Object3D} The root object. */ getRoot() { return this._localRoot || this._mixer._root; } // Interna _update( time, deltaTime, timeDirection, accuIndex ) { // called by the mixer if ( ! this.enabled ) { // call ._updateWeight() to update ._effectiveWeight this._updateWeight( time ); return; } const startTime = this._startTime; if ( startTime !== null ) { // check for scheduled start of action const timeRunning = ( time - startTime ) * timeDirection; if ( timeRunning < 0 || timeDirection === 0 ) { deltaTime = 0; } else { this._startTime = null; // unschedule deltaTime = timeDirection * timeRunning; } } // apply time scale and advance time deltaTime *= this._updateTimeScale( time ); const clipTime = this._updateTime( deltaTime ); // note: _updateTime may disable the action resulting in // an effective weight of 0 const weight = this._updateWeight( time ); if ( weight > 0 ) { const interpolants = this._interpolants; const propertyMixers = this._propertyBindings; switch ( this.blendMode ) { case AdditiveAnimationBlendMode: for ( let j = 0, m = interpolants.length; j !== m; ++ j ) { interpolants[ j ].evaluate( clipTime ); propertyMixers[ j ].accumulateAdditive( weight ); } break; case NormalAnimationBlendMode: default: for ( let j = 0, m = interpolants.length; j !== m; ++ j ) { interpolants[ j ].evaluate( clipTime ); propertyMixers[ j ].accumulate( accuIndex, weight ); } } } } _updateWeight( time ) { let weight = 0; if ( this.enabled ) { weight = this.weight; const interpolant = this._weightInterpolant; if ( interpolant !== null ) { const interpolantValue = interpolant.evaluate( time )[ 0 ]; weight *= interpolantValue; if ( time > interpolant.parameterPositions[ 1 ] ) { this.stopFading(); if ( interpolantValue === 0 ) { // faded out, disable this.enabled = false; } } } } this._effectiveWeight = weight; return weight; } _updateTimeScale( time ) { let timeScale = 0; if ( ! 
this.paused ) { timeScale = this.timeScale; const interpolant = this._timeScaleInterpolant; if ( interpolant !== null ) { const interpolantValue = interpolant.evaluate( time )[ 0 ]; timeScale *= interpolantValue; if ( time > interpolant.parameterPositions[ 1 ] ) { this.stopWarping(); if ( timeScale === 0 ) { // motion has halted, pause this.paused = true; } else { // warp done - apply final time scale this.timeScale = timeScale; } } } } this._effectiveTimeScale = timeScale; return timeScale; } _updateTime( deltaTime ) { const duration = this._clip.duration; const loop = this.loop; let time = this.time + deltaTime; let loopCount = this._loopCount; const pingPong = ( loop === LoopPingPong ); if ( deltaTime === 0 ) { if ( loopCount === -1 ) return time; return ( pingPong && ( loopCount & 1 ) === 1 ) ? duration - time : time; } if ( loop === LoopOnce ) { if ( loopCount === -1 ) { // just started this._loopCount = 0; this._setEndings( true, true, false ); } handle_stop: { if ( time >= duration ) { time = duration; } else if ( time < 0 ) { time = 0; } else { this.time = time; break handle_stop; } if ( this.clampWhenFinished ) this.paused = true; else this.enabled = false; this.time = time; this._mixer.dispatchEvent( { type: 'finished', action: this, direction: deltaTime < 0 ? -1 : 1 } ); } } else { // repetitive Repeat or PingPong if ( loopCount === -1 ) { // just started if ( deltaTime >= 0 ) { loopCount = 0; this._setEndings( true, this.repetitions === 0, pingPong ); } else { // when looping in reverse direction, the initial // transition through zero counts as a repetition, // so leave loopCount at -1 this._setEndings( this.repetitions === 0, true, pingPong ); } } if ( time >= duration || time < 0 ) { // wrap around const loopDelta = Math.floor( time / duration ); // signed time -= duration * loopDelta; loopCount += Math.abs( loopDelta ); const pending = this.repetitions - loopCount; if ( pending <= 0 ) { // have to stop (switch state, clamp time, fire event) if ( this.clampWhenFinished ) this.paused = true; else this.enabled = false; time = deltaTime > 0 ? duration : 0; this.time = time; this._mixer.dispatchEvent( { type: 'finished', action: this, direction: deltaTime > 0 ? 1 : -1 } ); } else { // keep running if ( pending === 1 ) { // entering the last round const atStart = deltaTime < 0; this._setEndings( atStart, ! atStart, pingPong ); } else { this._setEndings( false, false, pingPong ); } this._loopCount = loopCount; this.time = time; this._mixer.dispatchEvent( { type: 'loop', action: this, loopDelta: loopDelta } ); } } else { this.time = time; } if ( pingPong && ( loopCount & 1 ) === 1 ) { // invert time for the "pong round" return duration - time; } } return time; } _setEndings( atStart, atEnd, pingPong ) { const settings = this._interpolantSettings; if ( pingPong ) { settings.endingStart = ZeroSlopeEnding; settings.endingEnd = ZeroSlopeEnding; } else { // assuming for LoopOnce atStart == atEnd == true if ( atStart ) { settings.endingStart = this.zeroSlopeAtStart ? ZeroSlopeEnding : ZeroCurvatureEnding; } else { settings.endingStart = WrapAroundEnding; } if ( atEnd ) { settings.endingEnd = this.zeroSlopeAtEnd ? 
ZeroSlopeEnding : ZeroCurvatureEnding; } else { settings.endingEnd = WrapAroundEnding; } } } _scheduleFading( duration, weightNow, weightThen ) { const mixer = this._mixer, now = mixer.time; let interpolant = this._weightInterpolant; if ( interpolant === null ) { interpolant = mixer._lendControlInterpolant(); this._weightInterpolant = interpolant; } const times = interpolant.parameterPositions, values = interpolant.sampleValues; times[ 0 ] = now; values[ 0 ] = weightNow; times[ 1 ] = now + duration; values[ 1 ] = weightThen; return this; } } const _controlInterpolantsResultBuffer = new Float32Array( 1 ); /** * `AnimationMixer` is a player for animations on a particular object in * the scene. When multiple objects in the scene are animated independently, * one `AnimationMixer` may be used for each object. */ class AnimationMixer extends EventDispatcher { /** * Constructs a new animation mixer. * * @param {Object3D} root - The object whose animations shall be played by this mixer. */ constructor( root ) { super(); this._root = root; this._initMemoryManager(); this._accuIndex = 0; /** * The global mixer time (in seconds; starting with `0` on the mixer's creation). * * @type {number} * @default 0 */ this.time = 0; /** * A scaling factor for the global time. * * Note: Setting this member to `0` and later back to `1` is a * possibility to pause/unpause all actions that are controlled by this * mixer. * * @type {number} * @default 1 */ this.timeScale = 1.0; } _bindAction( action, prototypeAction ) { const root = action._localRoot || this._root, tracks = action._clip.tracks, nTracks = tracks.length, bindings = action._propertyBindings, interpolants = action._interpolants, rootUuid = root.uuid, bindingsByRoot = this._bindingsByRootAndName; let bindingsByName = bindingsByRoot[ rootUuid ]; if ( bindingsByName === undefined ) { bindingsByName = {}; bindingsByRoot[ rootUuid ] = bindingsByName; } for ( let i = 0; i !== nTracks; ++ i ) { const track = tracks[ i ], trackName = track.name; let binding = bindingsByName[ trackName ]; if ( binding !== undefined ) { ++ binding.referenceCount; bindings[ i ] = binding; } else { binding = bindings[ i ]; if ( binding !== undefined ) { // existing binding, make sure the cache knows if ( binding._cacheIndex === null ) { ++ binding.referenceCount; this._addInactiveBinding( binding, rootUuid, trackName ); } continue; } const path = prototypeAction && prototypeAction. _propertyBindings[ i ].binding.parsedPath; binding = new PropertyMixer( PropertyBinding.create( root, trackName, path ), track.ValueTypeName, track.getValueSize() ); ++ binding.referenceCount; this._addInactiveBinding( binding, rootUuid, trackName ); bindings[ i ] = binding; } interpolants[ i ].resultBuffer = binding.buffer; } } _activateAction( action ) { if ( ! 
this._isActiveAction( action ) ) { if ( action._cacheIndex === null ) { // this action has been forgotten by the cache, but the user // appears to be still using it -> rebind const rootUuid = ( action._localRoot || this._root ).uuid, clipUuid = action._clip.uuid, actionsForClip = this._actionsByClip[ clipUuid ]; this._bindAction( action, actionsForClip && actionsForClip.knownActions[ 0 ] ); this._addInactiveAction( action, clipUuid, rootUuid ); } const bindings = action._propertyBindings; // increment reference counts / sort out state for ( let i = 0, n = bindings.length; i !== n; ++ i ) { const binding = bindings[ i ]; if ( binding.useCount ++ === 0 ) { this._lendBinding( binding ); binding.saveOriginalState(); } } this._lendAction( action ); } } _deactivateAction( action ) { if ( this._isActiveAction( action ) ) { const bindings = action._propertyBindings; // decrement reference counts / sort out state for ( let i = 0, n = bindings.length; i !== n; ++ i ) { const binding = bindings[ i ]; if ( -- binding.useCount === 0 ) { binding.restoreOriginalState(); this._takeBackBinding( binding ); } } this._takeBackAction( action ); } } // Memory manager _initMemoryManager() { this._actions = []; // 'nActiveActions' followed by inactive ones this._nActiveActions = 0; this._actionsByClip = {}; // inside: // { // knownActions: Array< AnimationAction > - used as prototypes // actionByRoot: AnimationAction - lookup // } this._bindings = []; // 'nActiveBindings' followed by inactive ones this._nActiveBindings = 0; this._bindingsByRootAndName = {}; // inside: Map< name, PropertyMixer > this._controlInterpolants = []; // same game as above this._nActiveControlInterpolants = 0; const scope = this; this.stats = { actions: { get total() { return scope._actions.length; }, get inUse() { return scope._nActiveActions; } }, bindings: { get total() { return scope._bindings.length; }, get inUse() { return scope._nActiveBindings; } }, controlInterpolants: { get total() { return scope._controlInterpolants.length; }, get inUse() { return scope._nActiveControlInterpolants; } } }; } // Memory management for AnimationAction objects _isActiveAction( action ) { const index = action._cacheIndex; return index !== null && index < this._nActiveActions; } _addInactiveAction( action, clipUuid, rootUuid ) { const actions = this._actions, actionsByClip = this._actionsByClip; let actionsForClip = actionsByClip[ clipUuid ]; if ( actionsForClip === undefined ) { actionsForClip = { knownActions: [ action ], actionByRoot: {} }; action._byClipCacheIndex = 0; actionsByClip[ clipUuid ] = actionsForClip; } else { const knownActions = actionsForClip.knownActions; action._byClipCacheIndex = knownActions.length; knownActions.push( action ); } action._cacheIndex = actions.length; actions.push( action ); actionsForClip.actionByRoot[ rootUuid ] = action; } _removeInactiveAction( action ) { const actions = this._actions, lastInactiveAction = actions[ actions.length - 1 ], cacheIndex = action._cacheIndex; lastInactiveAction._cacheIndex = cacheIndex; actions[ cacheIndex ] = lastInactiveAction; actions.pop(); action._cacheIndex = null; const clipUuid = action._clip.uuid, actionsByClip = this._actionsByClip, actionsForClip = actionsByClip[ clipUuid ], knownActionsForClip = actionsForClip.knownActions, lastKnownAction = knownActionsForClip[ knownActionsForClip.length - 1 ], byClipCacheIndex = action._byClipCacheIndex; lastKnownAction._byClipCacheIndex = byClipCacheIndex; knownActionsForClip[ byClipCacheIndex ] = lastKnownAction; 
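// the last known action has been swapped into the vacated slot, so the duplicated tail entry can be dropped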
knownActionsForClip.pop(); action._byClipCacheIndex = null; const actionByRoot = actionsForClip.actionByRoot, rootUuid = ( action._localRoot || this._root ).uuid; delete actionByRoot[ rootUuid ]; if ( knownActionsForClip.length === 0 ) { delete actionsByClip[ clipUuid ]; } this._removeInactiveBindingsForAction( action ); } _removeInactiveBindingsForAction( action ) { const bindings = action._propertyBindings; for ( let i = 0, n = bindings.length; i !== n; ++ i ) { const binding = bindings[ i ]; if ( -- binding.referenceCount === 0 ) { this._removeInactiveBinding( binding ); } } } _lendAction( action ) { // [ active actions | inactive actions ] // [ active actions >| inactive actions ] // s a // <-swap-> // a s const actions = this._actions, prevIndex = action._cacheIndex, lastActiveIndex = this._nActiveActions ++, firstInactiveAction = actions[ lastActiveIndex ]; action._cacheIndex = lastActiveIndex; actions[ lastActiveIndex ] = action; firstInactiveAction._cacheIndex = prevIndex; actions[ prevIndex ] = firstInactiveAction; } _takeBackAction( action ) { // [ active actions | inactive actions ] // [ active actions |< inactive actions ] // a s // <-swap-> // s a const actions = this._actions, prevIndex = action._cacheIndex, firstInactiveIndex = -- this._nActiveActions, lastActiveAction = actions[ firstInactiveIndex ]; action._cacheIndex = firstInactiveIndex; actions[ firstInactiveIndex ] = action; lastActiveAction._cacheIndex = prevIndex; actions[ prevIndex ] = lastActiveAction; } // Memory management for PropertyMixer objects _addInactiveBinding( binding, rootUuid, trackName ) { const bindingsByRoot = this._bindingsByRootAndName, bindings = this._bindings; let bindingByName = bindingsByRoot[ rootUuid ]; if ( bindingByName === undefined ) { bindingByName = {}; bindingsByRoot[ rootUuid ] = bindingByName; } bindingByName[ trackName ] = binding; binding._cacheIndex = bindings.length; bindings.push( binding ); } _removeInactiveBinding( binding ) { const bindings = this._bindings, propBinding = binding.binding, rootUuid = propBinding.rootNode.uuid, trackName = propBinding.path, bindingsByRoot = this._bindingsByRootAndName, bindingByName = bindingsByRoot[ rootUuid ], lastInactiveBinding = bindings[ bindings.length - 1 ], cacheIndex = binding._cacheIndex; lastInactiveBinding._cacheIndex = cacheIndex; bindings[ cacheIndex ] = lastInactiveBinding; bindings.pop(); delete bindingByName[ trackName ]; if ( Object.keys( bindingByName ).length === 0 ) { delete bindingsByRoot[ rootUuid ]; } } _lendBinding( binding ) { const bindings = this._bindings, prevIndex = binding._cacheIndex, lastActiveIndex = this._nActiveBindings ++, firstInactiveBinding = bindings[ lastActiveIndex ]; binding._cacheIndex = lastActiveIndex; bindings[ lastActiveIndex ] = binding; firstInactiveBinding._cacheIndex = prevIndex; bindings[ prevIndex ] = firstInactiveBinding; } _takeBackBinding( binding ) { const bindings = this._bindings, prevIndex = binding._cacheIndex, firstInactiveIndex = -- this._nActiveBindings, lastActiveBinding = bindings[ firstInactiveIndex ]; binding._cacheIndex = firstInactiveIndex; bindings[ firstInactiveIndex ] = binding; lastActiveBinding._cacheIndex = prevIndex; bindings[ prevIndex ] = lastActiveBinding; } // Memory management of Interpolants for weight and time scale _lendControlInterpolant() { const interpolants = this._controlInterpolants, lastActiveIndex = this._nActiveControlInterpolants ++; let interpolant = interpolants[ lastActiveIndex ]; if ( interpolant === undefined ) { interpolant = new 
LinearInterpolant( new Float32Array( 2 ), new Float32Array( 2 ), 1, _controlInterpolantsResultBuffer ); interpolant.__cacheIndex = lastActiveIndex; interpolants[ lastActiveIndex ] = interpolant; } return interpolant; } _takeBackControlInterpolant( interpolant ) { const interpolants = this._controlInterpolants, prevIndex = interpolant.__cacheIndex, firstInactiveIndex = -- this._nActiveControlInterpolants, lastActiveInterpolant = interpolants[ firstInactiveIndex ]; interpolant.__cacheIndex = firstInactiveIndex; interpolants[ firstInactiveIndex ] = interpolant; lastActiveInterpolant.__cacheIndex = prevIndex; interpolants[ prevIndex ] = lastActiveInterpolant; } /** * Returns an instance of {@link AnimationAction} for the passed clip. * * If an action fitting the clip and root parameters doesn't yet exist, it * will be created by this method. Calling this method several times with the * same clip and root parameters always returns the same action. * * @param {AnimationClip|string} clip - An animation clip or alternatively the name of the animation clip. * @param {Object3D} [optionalRoot] - An alternative root object. * @param {(NormalAnimationBlendMode|AdditiveAnimationBlendMode)} [blendMode] - The blend mode. * @return {?AnimationAction} The animation action. */ clipAction( clip, optionalRoot, blendMode ) { const root = optionalRoot || this._root, rootUuid = root.uuid; let clipObject = typeof clip === 'string' ? AnimationClip.findByName( root, clip ) : clip; const clipUuid = clipObject !== null ? clipObject.uuid : clip; const actionsForClip = this._actionsByClip[ clipUuid ]; let prototypeAction = null; if ( blendMode === undefined ) { if ( clipObject !== null ) { blendMode = clipObject.blendMode; } else { blendMode = NormalAnimationBlendMode; } } if ( actionsForClip !== undefined ) { const existingAction = actionsForClip.actionByRoot[ rootUuid ]; if ( existingAction !== undefined && existingAction.blendMode === blendMode ) { return existingAction; } // we know the clip, so we don't have to parse all // the bindings again but can just copy prototypeAction = actionsForClip.knownActions[ 0 ]; // also, take the clip from the prototype action if ( clipObject === null ) clipObject = prototypeAction._clip; } // clip must be known when specified via string if ( clipObject === null ) return null; // allocate all resources required to run it const newAction = new AnimationAction( this, clipObject, optionalRoot, blendMode ); this._bindAction( newAction, prototypeAction ); // and make the action known to the memory manager this._addInactiveAction( newAction, clipUuid, rootUuid ); return newAction; } /** * Returns an existing animation action for the passed clip. * * @param {AnimationClip|string} clip - An animation clip or alternatively the name of the animation clip. * @param {Object3D} [optionalRoot] - An alternative root object. * @return {?AnimationAction} The animation action. Returns `null` if no action was found. */ existingAction( clip, optionalRoot ) { const root = optionalRoot || this._root, rootUuid = root.uuid, clipObject = typeof clip === 'string' ? AnimationClip.findByName( root, clip ) : clip, clipUuid = clipObject ? clipObject.uuid : clip, actionsForClip = this._actionsByClip[ clipUuid ]; if ( actionsForClip !== undefined ) { return actionsForClip.actionByRoot[ rootUuid ] || null; } return null; } /** * Deactivates all previously scheduled actions on this mixer. * * @return {AnimationMixer} A reference to thi animation mixer. 
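 *
 * A minimal sketch, assuming `model` and `clip` come from a loaded asset and
 * `delta` is the frame delta time in seconds:
 * ```js
 * const mixer = new THREE.AnimationMixer( model );
 * mixer.clipAction( clip ).play();
 *
 * // advance the mixer with the frame delta inside the render loop
 * mixer.update( delta );
 *
 * // ... later, e.g. when the animated model is hidden
 * mixer.stopAllAction();
 * ```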
*/ stopAllAction() { const actions = this._actions, nActions = this._nActiveActions; for ( let i = nActions - 1; i >= 0; -- i ) { actions[ i ].stop(); } return this; } /** * Advances the global mixer time and updates the animation. * * This is usually done in the render loop by passing the delta * time from {@link Clock} or {@link Timer}. * * @param {number} deltaTime - The delta time in seconds. * @return {AnimationMixer} A reference to thi animation mixer. */ update( deltaTime ) { deltaTime *= this.timeScale; const actions = this._actions, nActions = this._nActiveActions, time = this.time += deltaTime, timeDirection = Math.sign( deltaTime ), accuIndex = this._accuIndex ^= 1; // run active actions for ( let i = 0; i !== nActions; ++ i ) { const action = actions[ i ]; action._update( time, deltaTime, timeDirection, accuIndex ); } // update scene graph const bindings = this._bindings, nBindings = this._nActiveBindings; for ( let i = 0; i !== nBindings; ++ i ) { bindings[ i ].apply( accuIndex ); } return this; } /** * Sets the global mixer to a specific time and updates the animation accordingly. * * This is useful when you need to jump to an exact time in an animation. The * input parameter will be scaled by {@link AnimationMixer#timeScale} * * @param {number} time - The time to set in seconds. * @return {AnimationMixer} A reference to thi animation mixer. */ setTime( time ) { this.time = 0; // Zero out time attribute for AnimationMixer object; for ( let i = 0; i < this._actions.length; i ++ ) { this._actions[ i ].time = 0; // Zero out time attribute for all associated AnimationAction objects. } return this.update( time ); // Update used to set exact time. Returns "this" AnimationMixer object. } /** * Returns this mixer's root object. * * @return {Object3D} The mixer's root object. */ getRoot() { return this._root; } /** * Deallocates all memory resources for a clip. Before using this method make * sure to call {@link AnimationAction#stop} for all related actions. * * @param {AnimationClip} clip - The clip to uncache. */ uncacheClip( clip ) { const actions = this._actions, clipUuid = clip.uuid, actionsByClip = this._actionsByClip, actionsForClip = actionsByClip[ clipUuid ]; if ( actionsForClip !== undefined ) { // note: just calling _removeInactiveAction would mess up the // iteration state and also require updating the state we can // just throw away const actionsToRemove = actionsForClip.knownActions; for ( let i = 0, n = actionsToRemove.length; i !== n; ++ i ) { const action = actionsToRemove[ i ]; this._deactivateAction( action ); const cacheIndex = action._cacheIndex, lastInactiveAction = actions[ actions.length - 1 ]; action._cacheIndex = null; action._byClipCacheIndex = null; lastInactiveAction._cacheIndex = cacheIndex; actions[ cacheIndex ] = lastInactiveAction; actions.pop(); this._removeInactiveBindingsForAction( action ); } delete actionsByClip[ clipUuid ]; } } /** * Deallocates all memory resources for a root object. Before using this * method make sure to call {@link AnimationAction#stop} for all related * actions or alternatively {@link AnimationMixer#stopAllAction} when the * mixer operates on a single root. * * @param {Object3D} root - The root object to uncache. 
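 *
 * A brief sketch, assuming `model` is a root object that is no longer animated:
 * ```js
 * mixer.stopAllAction();
 * mixer.uncacheRoot( model );
 * ```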
*/ uncacheRoot( root ) { const rootUuid = root.uuid, actionsByClip = this._actionsByClip; for ( const clipUuid in actionsByClip ) { const actionByRoot = actionsByClip[ clipUuid ].actionByRoot, action = actionByRoot[ rootUuid ]; if ( action !== undefined ) { this._deactivateAction( action ); this._removeInactiveAction( action ); } } const bindingsByRoot = this._bindingsByRootAndName, bindingByName = bindingsByRoot[ rootUuid ]; if ( bindingByName !== undefined ) { for ( const trackName in bindingByName ) { const binding = bindingByName[ trackName ]; binding.restoreOriginalState(); this._removeInactiveBinding( binding ); } } } /** * Deallocates all memory resources for an action. The action is identified by the * given clip and an optional root object. Before using this method make * sure to call {@link AnimationAction#stop} to deactivate the action. * * @param {AnimationClip|string} clip - An animation clip or alternatively the name of the animation clip. * @param {Object3D} [optionalRoot] - An alternative root object. */ uncacheAction( clip, optionalRoot ) { const action = this.existingAction( clip, optionalRoot ); if ( action !== null ) { this._deactivateAction( action ); this._removeInactiveAction( action ); } } } /** * Represents a 3D render target. * * @augments RenderTarget */ class RenderTarget3D extends RenderTarget { /** * Constructs a new 3D render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {number} [depth=1] - The depth of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, depth = 1, options = {} ) { super( width, height, options ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isRenderTarget3D = true; this.depth = depth; /** * Overwritten with a different texture type. * * @type {Data3DTexture} */ this.texture = new Data3DTexture( null, width, height, depth ); this.texture.isRenderTargetTexture = true; } } /** * Represents an array render target. * * @augments RenderTarget */ class RenderTargetArray extends RenderTarget { /** * Constructs a new array render target. * * @param {number} [width=1] - The width of the render target. * @param {number} [height=1] - The height of the render target. * @param {number} [depth=1] - The depth of the render target. * @param {RenderTarget~Options} [options] - The configuration object. */ constructor( width = 1, height = 1, depth = 1, options = {} ) { super( width, height, options ); this.isRenderTargetArray = true; this.depth = depth; /** * Overwritten with a different texture type. * * @type {DataArrayTexture} */ this.texture = new DataArrayTexture( null, width, height, depth ); this.texture.isRenderTargetTexture = true; } } /** * Represents a uniform which is a global shader variable. Uniforms are passed to shader programs. * * When declaring a uniform of a {@link ShaderMaterial}, it is declared by value or by object. * ```js * uniforms: { * time: { value: 1.0 }, * resolution: new Uniform( new Vector2() ) * }; * ``` * Since this class can only be used in context of {@link ShaderMaterial}, it is only supported * in {@link WebGLRenderer}. */ class Uniform { /** * Constructs a new uniform. * * @param {any} value - The uniform value. */ constructor( value ) { /** * The uniform value. * * @type {any} */ this.value = value; } /** * Returns a new uniform with copied values from this instance.
* If the value has a `clone()` method, the value is cloned as well. * * @return {Uniform} A clone of this instance. */ clone() { return new Uniform( this.value.clone === undefined ? this.value : this.value.clone() ); } } let _id$1 = 0; /** * A class for managing multiple uniforms in a single group. The renderer will process * such a definition as a single UBO. * * Since this class can only be used in context of {@link ShaderMaterial}, it is only supported * in {@link WebGLRenderer}. * * @augments EventDispatcher */ class UniformsGroup extends EventDispatcher { /** * Constructs a new uniforms group. */ constructor() { super(); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isUniformsGroup = true; /** * The ID of the 3D object. * * @name UniformsGroup#id * @type {number} * @readonly */ Object.defineProperty( this, 'id', { value: _id$1 ++ } ); /** * The name of the uniforms group. * * @type {string} */ this.name = ''; /** * The buffer usage. * * @type {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} * @default StaticDrawUsage */ this.usage = StaticDrawUsage; /** * An array holding the uniforms. * * @type {Array} */ this.uniforms = []; } /** * Adds the given uniform to this uniforms group. * * @param {Uniform} uniform - The uniform to add. * @return {UniformsGroup} A reference to this uniforms group. */ add( uniform ) { this.uniforms.push( uniform ); return this; } /** * Removes the given uniform from this uniforms group. * * @param {Uniform} uniform - The uniform to remove. * @return {UniformsGroup} A reference to this uniforms group. */ remove( uniform ) { const index = this.uniforms.indexOf( uniform ); if ( index !== -1 ) this.uniforms.splice( index, 1 ); return this; } /** * Sets the name of this uniforms group. * * @param {string} name - The name to set. * @return {UniformsGroup} A reference to this uniforms group. */ setName( name ) { this.name = name; return this; } /** * Sets the usage of this uniforms group. * * @param {(StaticDrawUsage|DynamicDrawUsage|StreamDrawUsage|StaticReadUsage|DynamicReadUsage|StreamReadUsage|StaticCopyUsage|DynamicCopyUsage|StreamCopyUsage)} value - The usage to set. * @return {UniformsGroup} A reference to this uniforms group. */ setUsage( value ) { this.usage = value; return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. * * @fires Texture#dispose */ dispose() { this.dispatchEvent( { type: 'dispose' } ); } /** * Copies the values of the given uniforms group to this instance. * * @param {UniformsGroup} source - The uniforms group to copy. * @return {UniformsGroup} A reference to this uniforms group. */ copy( source ) { this.name = source.name; this.usage = source.usage; const uniformsSource = source.uniforms; this.uniforms.length = 0; for ( let i = 0, l = uniformsSource.length; i < l; i ++ ) { const uniforms = Array.isArray( uniformsSource[ i ] ) ? uniformsSource[ i ] : [ uniformsSource[ i ] ]; for ( let j = 0; j < uniforms.length; j ++ ) { this.uniforms.push( uniforms[ j ].clone() ); } } return this; } /** * Returns a new uniforms group with copied values from this instance. * * @return {UniformsGroup} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } /** * An instanced version of an interleaved buffer. 
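 *
 * A minimal sketch, assuming `array` is a Float32Array holding six floats per
 * instance (offset.xyz followed by color.rgb) and `geometry` is an
 * {@link InstancedBufferGeometry}; the attribute names are arbitrary and must
 * match the material's shader:
 * ```js
 * const buffer = new THREE.InstancedInterleavedBuffer( array, 6, 1 );
 *
 * geometry.setAttribute( 'instanceOffset', new THREE.InterleavedBufferAttribute( buffer, 3, 0 ) );
 * geometry.setAttribute( 'instanceColor', new THREE.InterleavedBufferAttribute( buffer, 3, 3 ) );
 * ```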
* * @augments InterleavedBuffer */ class InstancedInterleavedBuffer extends InterleavedBuffer { /** * Constructs a new instanced interleaved buffer. * * @param {TypedArray} array - A typed array with a shared buffer storing attribute data. * @param {number} stride - The number of typed-array elements per vertex. * @param {number} [meshPerAttribute=1] - Defines how often a value of this interleaved buffer should be repeated. */ constructor( array, stride, meshPerAttribute = 1 ) { super( array, stride ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isInstancedInterleavedBuffer = true; /** * Defines how often a value of this buffer attribute should be repeated, * see {@link InstancedBufferAttribute#meshPerAttribute}. * * @type {number} * @default 1 */ this.meshPerAttribute = meshPerAttribute; } copy( source ) { super.copy( source ); this.meshPerAttribute = source.meshPerAttribute; return this; } clone( data ) { const ib = super.clone( data ); ib.meshPerAttribute = this.meshPerAttribute; return ib; } toJSON( data ) { const json = super.toJSON( data ); json.isInstancedInterleavedBuffer = true; json.meshPerAttribute = this.meshPerAttribute; return json; } } /** * An alternative version of a buffer attribute with more control over the VBO. * * The renderer does not construct a VBO for this kind of attribute. Instead, it uses * whatever VBO is passed in constructor and can later be altered via the `buffer` property. * * The most common use case for this class is when some kind of GPGPU calculation interferes * or even produces the VBOs in question. * * Notice that this class can only be used with {@link WebGLRenderer}. */ class GLBufferAttribute { /** * Constructs a new GL buffer attribute. * * @param {WebGLBuffer} buffer - The native WebGL buffer. * @param {number} type - The native data type (e.g. `gl.FLOAT`). * @param {number} itemSize - The item size. * @param {number} elementSize - The corresponding size (in bytes) for the given `type` parameter. * @param {number} count - The expected number of vertices in VBO. */ constructor( buffer, type, itemSize, elementSize, count ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isGLBufferAttribute = true; /** * The name of the buffer attribute. * * @type {string} */ this.name = ''; /** * The native WebGL buffer. * * @type {WebGLBuffer} */ this.buffer = buffer; /** * The native data type. * * @type {number} */ this.type = type; /** * The item size, see {@link BufferAttribute#itemSize}. * * @type {number} */ this.itemSize = itemSize; /** * The corresponding size (in bytes) for the given `type` parameter. * * @type {number} */ this.elementSize = elementSize; /** * The expected number of vertices in VBO. * * @type {number} */ this.count = count; /** * A version number, incremented every time the `needsUpdate` is set to `true`. * * @type {number} */ this.version = 0; } /** * Flag to indicate that this attribute has changed and should be re-sent to * the GPU. Set this to `true` when you modify the value of the array. * * @type {number} * @default false * @param {boolean} value */ set needsUpdate( value ) { if ( value === true ) this.version ++; } /** * Sets the given native WebGL buffer. * * @param {WebGLBuffer} buffer - The buffer to set. * @return {BufferAttribute} A reference to this instance. */ setBuffer( buffer ) { this.buffer = buffer; return this; } /** * Sets the given native data type and element size. 
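 *
 * A short sketch, assuming `gl` is the WebGL rendering context the underlying
 * buffer was created with:
 * ```js
 * attribute.setType( gl.FLOAT, 4 ); // 32-bit floats, 4 bytes per element
 * ```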
* * @param {number} type - The native data type (e.g. `gl.FLOAT`). * @param {number} elementSize - The corresponding size (in bytes) for the given `type` parameter. * @return {BufferAttribute} A reference to this instance. */ setType( type, elementSize ) { this.type = type; this.elementSize = elementSize; return this; } /** * Sets the item size. * * @param {number} itemSize - The item size. * @return {BufferAttribute} A reference to this instance. */ setItemSize( itemSize ) { this.itemSize = itemSize; return this; } /** * Sets the count (the expected number of vertices in VBO). * * @param {number} count - The count. * @return {BufferAttribute} A reference to this instance. */ setCount( count ) { this.count = count; return this; } } const _matrix = /*@__PURE__*/ new Matrix4(); /** * This class is designed to assist with raycasting. Raycasting is used for * mouse picking (working out what objects in the 3d space the mouse is over) * amongst other things. */ class Raycaster { /** * Constructs a new raycaster. * * @param {Vector3} origin - The origin vector where the ray casts from. * @param {Vector3} direction - The (normalized) direction vector that gives direction to the ray. * @param {number} [near=0] - All results returned are further away than near. Near can't be negative. * @param {number} [far=Infinity] - All results returned are closer than far. Far can't be lower than near. */ constructor( origin, direction, near = 0, far = Infinity ) { /** * The ray used for raycasting. * * @type {Ray} */ this.ray = new Ray( origin, direction ); /** * All results returned are further away than near. Near can't be negative. * * @type {number} * @default 0 */ this.near = near; /** * All results returned are closer than far. Far can't be lower than near. * * @type {number} * @default Infinity */ this.far = far; /** * The camera to use when raycasting against view-dependent objects such as * billboarded objects like sprites. This field can be set manually or * is set when calling `setFromCamera()`. * * @type {?Camera} * @default null */ this.camera = null; /** * Allows to selectively ignore 3D objects when performing intersection tests. * The following code example ensures that only 3D objects on layer `1` will be * honored by raycaster. * ```js * raycaster.layers.set( 1 ); * object.layers.enable( 1 ); * ``` * * @type {Layers} */ this.layers = new Layers(); /** * A parameter object that configures the raycasting. It has the structure: * * ``` * { * Mesh: {}, * Line: { threshold: 1 }, * LOD: {}, * Points: { threshold: 1 }, * Sprite: {} * } * ``` * Where `threshold` is the precision of the raycaster when intersecting objects, in world units. * * @type {Object} */ this.params = { Mesh: {}, Line: { threshold: 1 }, LOD: {}, Points: { threshold: 1 }, Sprite: {} }; } /** * Updates the ray with a new origin and direction by copying the values from the arguments. * * @param {Vector3} origin - The origin vector where the ray casts from. * @param {Vector3} direction - The (normalized) direction vector that gives direction to the ray. */ set( origin, direction ) { // direction is assumed to be normalized (for accurate distance calculations) this.ray.set( origin, direction ); } /** * Uses the given coordinates and camera to compute a new origin and direction for the internal ray. * * @param {Vector2} coords - 2D coordinates of the mouse, in normalized device coordinates (NDC). * X and Y components should be between `-1` and `1`. * @param {Camera} camera - The camera from which the ray should originate.
*/ setFromCamera( coords, camera ) { if ( camera.isPerspectiveCamera ) { this.ray.origin.setFromMatrixPosition( camera.matrixWorld ); this.ray.direction.set( coords.x, coords.y, 0.5 ).unproject( camera ).sub( this.ray.origin ).normalize(); this.camera = camera; } else if ( camera.isOrthographicCamera ) { this.ray.origin.set( coords.x, coords.y, ( camera.near + camera.far ) / ( camera.near - camera.far ) ).unproject( camera ); // set origin in plane of camera this.ray.direction.set( 0, 0, -1 ).transformDirection( camera.matrixWorld ); this.camera = camera; } else { console.error( 'THREE.Raycaster: Unsupported camera type: ' + camera.type ); } } /** * Uses the given WebXR controller to compute a new origin and direction for the internal ray. * * @param {WebXRController} controller - The controller to copy the position and direction from. * @return {Raycaster} A reference to this raycaster. */ setFromXRController( controller ) { _matrix.identity().extractRotation( controller.matrixWorld ); this.ray.origin.setFromMatrixPosition( controller.matrixWorld ); this.ray.direction.set( 0, 0, -1 ).applyMatrix4( _matrix ); return this; } /** * The intersection point of a raycaster intersection test. * @typedef {Object} Raycaster~Intersection * @property {number} distance - The distance from the ray's origin to the intersection point. * @property {number} distanceToRay - Some 3D objects e.g. {@link Points} provide the distance of the * intersection to the nearest point on the ray. For other objects it will be `undefined`. * @property {Vector3} point - The intersection point, in world coordinates. * @property {Object} face - The face that has been intersected. * @property {number} faceIndex - The face index. * @property {Object3D} object - The 3D object that has been intersected. * @property {Vector2} uv - U,V coordinates at point of intersection. * @property {Vector2} uv1 - Second set of U,V coordinates at point of intersection. * @property {Vector3} normal - Interpolated normal vector at point of intersection. * @property {number} instanceId - The index number of the instance where the ray * intersects the {@link InstancedMesh}. */ /** * Checks all intersections between the ray and the object, with or without the * descendants. Intersections are returned sorted by distance, closest first. * * `Raycaster` delegates to the `raycast()` method of the passed 3D object when * evaluating whether the ray intersects the object or not. This allows meshes to respond * differently to ray casting than lines or points. * * Note that for meshes, faces must be pointed towards the origin of the ray in order * to be detected; intersections of the ray passing through the back of a face will not * be detected. To raycast against both faces of an object, you'll want to set {@link Material#side} * to `THREE.DoubleSide`. * * @param {Object3D} object - The 3D object to check for intersection with the ray. * @param {boolean} [recursive=true] - If set to `true`, it also checks all descendants. * Otherwise it only checks intersection with the object. * @param {Array} [intersects=[]] - The target array that holds the result of the method. * @return {Array} An array holding the intersection points. */ intersectObject( object, recursive = true, intersects = [] ) { intersect( object, this, intersects, recursive ); intersects.sort( ascSort ); return intersects; } /** * Checks all intersections between the ray and the objects, with or without * the descendants. Intersections are returned sorted by distance, closest first.
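 *
 * A typical picking sketch, assuming `pointer` holds normalized device
 * coordinates and `camera` and `scene` are provided by the application:
 * ```js
 * raycaster.setFromCamera( pointer, camera );
 *
 * const intersections = raycaster.intersectObjects( scene.children );
 *
 * if ( intersections.length > 0 ) console.log( intersections[ 0 ].object );
 * ```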
* * @param {Array} objects - The 3D objects to check for intersection with the ray. * @param {boolean} [recursive=true] - If set to `true`, it also checks all descendants. * Otherwise it only checks intersection with the object. * @param {Array} [intersects=[]] - The target array that holds the result of the method. * @return {Array} An array holding the intersection points. */ intersectObjects( objects, recursive = true, intersects = [] ) { for ( let i = 0, l = objects.length; i < l; i ++ ) { intersect( objects[ i ], this, intersects, recursive ); } intersects.sort( ascSort ); return intersects; } } function ascSort( a, b ) { return a.distance - b.distance; } function intersect( object, raycaster, intersects, recursive ) { let propagate = true; if ( object.layers.test( raycaster.layers ) ) { const result = object.raycast( raycaster, intersects ); if ( result === false ) propagate = false; } if ( propagate === true && recursive === true ) { const children = object.children; for ( let i = 0, l = children.length; i < l; i ++ ) { intersect( children[ i ], raycaster, intersects, true ); } } } /** * This class can be used to represent points in 3D space as * [Spherical coordinates]{@link https://en.wikipedia.org/wiki/Spherical_coordinate_system}. */ class Spherical { /** * Constructs a new spherical. * * @param {number} [radius=1] - The radius, or the Euclidean distance (straight-line distance) from the point to the origin. * @param {number} [phi=0] - The polar angle in radians from the y (up) axis. * @param {number} [theta=0] - The equator/azimuthal angle in radians around the y (up) axis. */ constructor( radius = 1, phi = 0, theta = 0 ) { /** * The radius, or the Euclidean distance (straight-line distance) from the point to the origin. * * @type {number} * @default 1 */ this.radius = radius; /** * The polar angle in radians from the y (up) axis. * * @type {number} * @default 0 */ this.phi = phi; /** * The equator/azimuthal angle in radians around the y (up) axis. * * @type {number} * @default 0 */ this.theta = theta; } /** * Sets the spherical components by copying the given values. * * @param {number} radius - The radius. * @param {number} phi - The polar angle. * @param {number} theta - The azimuthal angle. * @return {Spherical} A reference to this spherical. */ set( radius, phi, theta ) { this.radius = radius; this.phi = phi; this.theta = theta; return this; } /** * Copies the values of the given spherical to this instance. * * @param {Spherical} other - The spherical to copy. * @return {Spherical} A reference to this spherical. */ copy( other ) { this.radius = other.radius; this.phi = other.phi; this.theta = other.theta; return this; } /** * Restricts the polar angle {@link Spherical#phi} to be between `0.000001` and * `Math.PI - 0.000001`. * * @return {Spherical} A reference to this spherical. */ makeSafe() { const EPS = 0.000001; this.phi = clamp( this.phi, EPS, Math.PI - EPS ); return this; } /** * Sets the spherical components from the given vector which is assumed to hold * Cartesian coordinates. * * @param {Vector3} v - The vector to set. * @return {Spherical} A reference to this spherical. */ setFromVector3( v ) { return this.setFromCartesianCoords( v.x, v.y, v.z ); } /** * Sets the spherical components from the given Cartesian coordinates. * * @param {number} x - The x value. * @param {number} y - The y value. * @param {number} z - The z value. * @return {Spherical} A reference to this spherical.
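*
* A small sketch of the conversion (values shown are approximate):
* ```js
* const s = new THREE.Spherical().setFromCartesianCoords( 0, 1, 1 );
* console.log( s.radius ); // ~1.414 (distance from the origin)
* console.log( s.phi );    // ~0.785 (45° away from the +y axis)
* console.log( s.theta );  // 0 (the point lies on the +z side of the y-z plane)
* ```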
*/ setFromCartesianCoords( x, y, z ) { this.radius = Math.sqrt( x * x + y * y + z * z ); if ( this.radius === 0 ) { this.theta = 0; this.phi = 0; } else { this.theta = Math.atan2( x, z ); this.phi = Math.acos( clamp( y / this.radius, -1, 1 ) ); } return this; } /** * Returns a new spherical with copied values from this instance. * * @return {Spherical} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } /** * This class can be used to represent points in 3D space as * [Cylindrical coordinates]{@link https://en.wikipedia.org/wiki/Cylindrical_coordinate_system}. */ class Cylindrical { /** * Constructs a new cylindrical. * * @param {number} [radius=1] - The distance from the origin to a point in the x-z plane. * @param {number} [theta=0] - A counterclockwise angle in the x-z plane measured in radians from the positive z-axis. * @param {number} [y=0] - The height above the x-z plane. */ constructor( radius = 1, theta = 0, y = 0 ) { /** * The distance from the origin to a point in the x-z plane. * * @type {number} * @default 1 */ this.radius = radius; /** * A counterclockwise angle in the x-z plane measured in radians from the positive z-axis. * * @type {number} * @default 0 */ this.theta = theta; /** * The height above the x-z plane. * * @type {number} * @default 0 */ this.y = y; } /** * Sets the cylindrical components by copying the given values. * * @param {number} radius - The radius. * @param {number} theta - The theta angle. * @param {number} y - The height value. * @return {Cylindrical} A reference to this cylindrical. */ set( radius, theta, y ) { this.radius = radius; this.theta = theta; this.y = y; return this; } /** * Copies the values of the given cylindrical to this instance. * * @param {Cylindrical} other - The cylindrical to copy. * @return {Cylindrical} A reference to this cylindrical. */ copy( other ) { this.radius = other.radius; this.theta = other.theta; this.y = other.y; return this; } /** * Sets the cylindrical components from the given vector which is assumed to hold * Cartesian coordinates. * * @param {Vector3} v - The vector to set. * @return {Cylindrical} A reference to this cylindrical. */ setFromVector3( v ) { return this.setFromCartesianCoords( v.x, v.y, v.z ); } /** * Sets the cylindrical components from the given Cartesian coordinates. * * @param {number} x - The x value. * @param {number} y - The y value. * @param {number} z - The z value. * @return {Cylindrical} A reference to this cylindrical. */ setFromCartesianCoords( x, y, z ) { this.radius = Math.sqrt( x * x + z * z ); this.theta = Math.atan2( x, z ); this.y = y; return this; } /** * Returns a new cylindrical with copied values from this instance. * * @return {Cylindrical} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } /** * Represents a 2x2 matrix. * * A Note on Row-Major and Column-Major Ordering: * * The constructor and {@link Matrix2#set} method take arguments in * [row-major]{@link https://en.wikipedia.org/wiki/Row-_and_column-major_order#Column-major_order} * order, while internally they are stored in the {@link Matrix2#elements} array in column-major order. * This means that calling: * ```js * const m = new THREE.Matrix2(); * m.set( 11, 12, * 21, 22 ); * ``` * will result in the elements array containing: * ```js * m.elements = [ 11, 21, * 12, 22 ]; * ``` * and internally all calculations are performed using column-major ordering.
* However, as the actual ordering makes no difference mathematically and * most people are used to thinking about matrices in row-major order, the * three.js documentation shows matrices in row-major order. Just bear in * mind that if you are reading the source code, you'll have to take the * transpose of any matrices outlined here to make sense of the calculations. */ class Matrix2 { /** * Constructs a new 2x2 matrix. The arguments are supposed to be * in row-major order. If no arguments are provided, the constructor * initializes the matrix as an identity matrix. * * @param {number} [n11] - 1-1 matrix element. * @param {number} [n12] - 1-2 matrix element. * @param {number} [n21] - 2-1 matrix element. * @param {number} [n22] - 2-2 matrix element. */ constructor( n11, n12, n21, n22 ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ Matrix2.prototype.isMatrix2 = true; /** * A column-major list of matrix values. * * @type {Array} */ this.elements = [ 1, 0, 0, 1, ]; if ( n11 !== undefined ) { this.set( n11, n12, n21, n22 ); } } /** * Sets this matrix to the 2x2 identity matrix. * * @return {Matrix2} A reference to this matrix. */ identity() { this.set( 1, 0, 0, 1, ); return this; } /** * Sets the elements of the matrix from the given array. * * @param {Array} array - The matrix elements in column-major order. * @param {number} [offset=0] - Index of the first element in the array. * @return {Matrix2} A reference to this matrix. */ fromArray( array, offset = 0 ) { for ( let i = 0; i < 4; i ++ ) { this.elements[ i ] = array[ i + offset ]; } return this; } /** * Sets the elements of the matrix.The arguments are supposed to be * in row-major order. * * @param {number} n11 - 1-1 matrix element. * @param {number} n12 - 1-2 matrix element. * @param {number} n21 - 2-1 matrix element. * @param {number} n22 - 2-2 matrix element. * @return {Matrix2} A reference to this matrix. */ set( n11, n12, n21, n22 ) { const te = this.elements; te[ 0 ] = n11; te[ 2 ] = n12; te[ 1 ] = n21; te[ 3 ] = n22; return this; } } const _vector$4 = /*@__PURE__*/ new Vector2(); /** * Represents an axis-aligned bounding box (AABB) in 2D space. */ class Box2 { /** * Constructs a new bounding box. * * @param {Vector2} [min=(Infinity,Infinity)] - A vector representing the lower boundary of the box. * @param {Vector2} [max=(-Infinity,-Infinity)] - A vector representing the upper boundary of the box. */ constructor( min = new Vector2( + Infinity, + Infinity ), max = new Vector2( - Infinity, - Infinity ) ) { /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isBox2 = true; /** * The lower boundary of the box. * * @type {Vector2} */ this.min = min; /** * The upper boundary of the box. * * @type {Vector2} */ this.max = max; } /** * Sets the lower and upper boundaries of this box. * Please note that this method only copies the values from the given objects. * * @param {Vector2} min - The lower boundary of the box. * @param {Vector2} max - The upper boundary of the box. * @return {Box2} A reference to this bounding box. */ set( min, max ) { this.min.copy( min ); this.max.copy( max ); return this; } /** * Sets the upper and lower bounds of this box so it encloses the position data * in the given array. * * @param {Array} points - An array holding 2D position data as instances of {@link Vector2}. * @return {Box2} A reference to this bounding box. 
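*
* A short usage sketch, computing the bounds of two arbitrary points:
* ```js
* const box = new THREE.Box2().setFromPoints( [
* 	new THREE.Vector2( - 1, - 1 ),
* 	new THREE.Vector2( 2, 0.5 )
* ] );
* console.log( box.min ); // (-1, -1)
* console.log( box.max ); // (2, 0.5)
* ```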
*/ setFromPoints( points ) { this.makeEmpty(); for ( let i = 0, il = points.length; i < il; i ++ ) { this.expandByPoint( points[ i ] ); } return this; } /** * Centers this box on the given center vector and sets this box's width, height and * depth to the given size values. * * @param {Vector2} center - The center of the box. * @param {Vector2} size - The x and y dimensions of the box. * @return {Box2} A reference to this bounding box. */ setFromCenterAndSize( center, size ) { const halfSize = _vector$4.copy( size ).multiplyScalar( 0.5 ); this.min.copy( center ).sub( halfSize ); this.max.copy( center ).add( halfSize ); return this; } /** * Returns a new box with copied values from this instance. * * @return {Box2} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } /** * Copies the values of the given box to this instance. * * @param {Box2} box - The box to copy. * @return {Box2} A reference to this bounding box. */ copy( box ) { this.min.copy( box.min ); this.max.copy( box.max ); return this; } /** * Makes this box empty which means in encloses a zero space in 2D. * * @return {Box2} A reference to this bounding box. */ makeEmpty() { this.min.x = this.min.y = + Infinity; this.max.x = this.max.y = - Infinity; return this; } /** * Returns true if this box includes zero points within its bounds. * Note that a box with equal lower and upper bounds still includes one * point, the one both bounds share. * * @return {boolean} Whether this box is empty or not. */ isEmpty() { // this is a more robust check for empty than ( volume <= 0 ) because volume can get positive with two negative axes return ( this.max.x < this.min.x ) || ( this.max.y < this.min.y ); } /** * Returns the center point of this box. * * @param {Vector2} target - The target vector that is used to store the method's result. * @return {Vector2} The center point. */ getCenter( target ) { return this.isEmpty() ? target.set( 0, 0 ) : target.addVectors( this.min, this.max ).multiplyScalar( 0.5 ); } /** * Returns the dimensions of this box. * * @param {Vector2} target - The target vector that is used to store the method's result. * @return {Vector2} The size. */ getSize( target ) { return this.isEmpty() ? target.set( 0, 0 ) : target.subVectors( this.max, this.min ); } /** * Expands the boundaries of this box to include the given point. * * @param {Vector2} point - The point that should be included by the bounding box. * @return {Box2} A reference to this bounding box. */ expandByPoint( point ) { this.min.min( point ); this.max.max( point ); return this; } /** * Expands this box equilaterally by the given vector. The width of this * box will be expanded by the x component of the vector in both * directions. The height of this box will be expanded by the y component of * the vector in both directions. * * @param {Vector2} vector - The vector that should expand the bounding box. * @return {Box2} A reference to this bounding box. */ expandByVector( vector ) { this.min.sub( vector ); this.max.add( vector ); return this; } /** * Expands each dimension of the box by the given scalar. If negative, the * dimensions of the box will be contracted. * * @param {number} scalar - The scalar value that should expand the bounding box. * @return {Box2} A reference to this bounding box. */ expandByScalar( scalar ) { this.min.addScalar( - scalar ); this.max.addScalar( scalar ); return this; } /** * Returns `true` if the given point lies within or on the boundaries of this box. * * @param {Vector2} point - The point to test. 
* @return {boolean} Whether the bounding box contains the given point or not. */ containsPoint( point ) { return point.x >= this.min.x && point.x <= this.max.x && point.y >= this.min.y && point.y <= this.max.y; } /** * Returns `true` if this bounding box includes the entirety of the given bounding box. * If this box and the given one are identical, this function also returns `true`. * * @param {Box2} box - The bounding box to test. * @return {boolean} Whether the bounding box contains the given bounding box or not. */ containsBox( box ) { return this.min.x <= box.min.x && box.max.x <= this.max.x && this.min.y <= box.min.y && box.max.y <= this.max.y; } /** * Returns a point as a proportion of this box's width and height. * * @param {Vector2} point - A point in 2D space. * @param {Vector2} target - The target vector that is used to store the method's result. * @return {Vector2} A point as a proportion of this box's width and height. */ getParameter( point, target ) { // This can potentially have a divide by zero if the box // has a size dimension of 0. return target.set( ( point.x - this.min.x ) / ( this.max.x - this.min.x ), ( point.y - this.min.y ) / ( this.max.y - this.min.y ) ); } /** * Returns `true` if the given bounding box intersects with this bounding box. * * @param {Box2} box - The bounding box to test. * @return {boolean} Whether the given bounding box intersects with this bounding box. */ intersectsBox( box ) { // using 4 splitting planes to rule out intersections return box.max.x >= this.min.x && box.min.x <= this.max.x && box.max.y >= this.min.y && box.min.y <= this.max.y; } /** * Clamps the given point within the bounds of this box. * * @param {Vector2} point - The point to clamp. * @param {Vector2} target - The target vector that is used to store the method's result. * @return {Vector2} The clamped point. */ clampPoint( point, target ) { return target.copy( point ).clamp( this.min, this.max ); } /** * Returns the euclidean distance from any edge of this box to the specified point. If * the given point lies inside of this box, the distance will be `0`. * * @param {Vector2} point - The point to compute the distance to. * @return {number} The euclidean distance. */ distanceToPoint( point ) { return this.clampPoint( point, _vector$4 ).distanceTo( point ); } /** * Computes the intersection of this bounding box and the given one, setting the upper * bound of this box to the lesser of the two boxes' upper bounds and the * lower bound of this box to the greater of the two boxes' lower bounds. If * there's no overlap, makes this box empty. * * @param {Box2} box - The bounding box to intersect with. * @return {Box2} A reference to this bounding box. */ intersect( box ) { this.min.max( box.min ); this.max.min( box.max ); if ( this.isEmpty() ) this.makeEmpty(); return this; } /** * Computes the union of this box and another and the given one, setting the upper * bound of this box to the greater of the two boxes' upper bounds and the * lower bound of this box to the lesser of the two boxes' lower bounds. * * @param {Box2} box - The bounding box that will be unioned with this instance. * @return {Box2} A reference to this bounding box. */ union( box ) { this.min.min( box.min ); this.max.max( box.max ); return this; } /** * Adds the given offset to both the upper and lower bounds of this bounding box, * effectively moving it in 2D space. * * @param {Vector2} offset - The offset that should be used to translate the bounding box. * @return {Box2} A reference to this bounding box. 
*/ translate( offset ) { this.min.add( offset ); this.max.add( offset ); return this; } /** * Returns `true` if this bounding box is equal with the given one. * * @param {Box2} box - The box to test for equality. * @return {boolean} Whether this bounding box is equal with the given one. */ equals( box ) { return box.min.equals( this.min ) && box.max.equals( this.max ); } } const _startP = /*@__PURE__*/ new Vector3(); const _startEnd = /*@__PURE__*/ new Vector3(); /** * An analytical line segment in 3D space represented by a start and end point. */ class Line3 { /** * Constructs a new line segment. * * @param {Vector3} [start=(0,0,0)] - Start of the line segment. * @param {Vector3} [end=(0,0,0)] - End of the line segment. */ constructor( start = new Vector3(), end = new Vector3() ) { /** * Start of the line segment. * * @type {Vector3} */ this.start = start; /** * End of the line segment. * * @type {Vector3} */ this.end = end; } /** * Sets the start and end values by copying the given vectors. * * @param {Vector3} start - The start point. * @param {Vector3} end - The end point. * @return {Line3} A reference to this line segment. */ set( start, end ) { this.start.copy( start ); this.end.copy( end ); return this; } /** * Copies the values of the given line segment to this instance. * * @param {Line3} line - The line segment to copy. * @return {Line3} A reference to this line segment. */ copy( line ) { this.start.copy( line.start ); this.end.copy( line.end ); return this; } /** * Returns the center of the line segment. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The center point. */ getCenter( target ) { return target.addVectors( this.start, this.end ).multiplyScalar( 0.5 ); } /** * Returns the delta vector of the line segment's start and end point. * * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The delta vector. */ delta( target ) { return target.subVectors( this.end, this.start ); } /** * Returns the squared Euclidean distance between the line's start and end point. * * @return {number} The squared Euclidean distance. */ distanceSq() { return this.start.distanceToSquared( this.end ); } /** * Returns the Euclidean distance between the line's start and end point. * * @return {number} The Euclidean distance. */ distance() { return this.start.distanceTo( this.end ); } /** * Returns a vector at a certain position along the line segment. * * @param {number} t - A value between `[0,1]` to represent a position along the line segment. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The point at the given position along the line segment. */ at( t, target ) { return this.delta( target ).multiplyScalar( t ).add( this.start ); } /** * Returns a point parameter based on the closest point as projected on the line segment. * * @param {Vector3} point - The point for which to return a point parameter. * @param {boolean} clampToLine - Whether to clamp the result to the range `[0,1]` or not. * @return {number} The point parameter. */ closestPointToPointParameter( point, clampToLine ) { _startP.subVectors( point, this.start ); _startEnd.subVectors( this.end, this.start ); const startEnd2 = _startEnd.dot( _startEnd ); const startEnd_startP = _startEnd.dot( _startP ); let t = startEnd_startP / startEnd2; if ( clampToLine ) { t = clamp( t, 0, 1 ); } return t; } /** * Returns the closest point on the line segment for a given point.
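* 
* A small sketch; the point (3, 5, 0) projects onto (3, 0, 0) of this segment:
* ```js
* const line = new THREE.Line3( new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 10, 0, 0 ) );
* const target = new THREE.Vector3();
* line.closestPointToPoint( new THREE.Vector3( 3, 5, 0 ), true, target ); // target becomes (3, 0, 0)
* ```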
* * @param {Vector3} point - The point to compute the closest point on the line for. * @param {boolean} clampToLine - Whether to clamp the result to the range `[0,1]` or not. * @param {Vector3} target - The target vector that is used to store the method's result. * @return {Vector3} The closest point on the line. */ closestPointToPoint( point, clampToLine, target ) { const t = this.closestPointToPointParameter( point, clampToLine ); return this.delta( target ).multiplyScalar( t ).add( this.start ); } /** * Applies a 4x4 transformation matrix to this line segment. * * @param {Matrix4} matrix - The transformation matrix. * @return {Line3} A reference to this line segment. */ applyMatrix4( matrix ) { this.start.applyMatrix4( matrix ); this.end.applyMatrix4( matrix ); return this; } /** * Returns `true` if this line segment is equal with the given one. * * @param {Line3} line - The line segment to test for equality. * @return {boolean} Whether this line segment is equal with the given one. */ equals( line ) { return line.start.equals( this.start ) && line.end.equals( this.end ); } /** * Returns a new line segment with copied values from this instance. * * @return {Line3} A clone of this instance. */ clone() { return new this.constructor().copy( this ); } } const _vector$3 = /*@__PURE__*/ new Vector3(); /** * This displays a cone shaped helper object for a {@link SpotLight}. * * ```js * const spotLight = new THREE.SpotLight( 0xffffff ); * spotLight.position.set( 10, 10, 10 ); * scene.add( spotLight ); * * const spotLightHelper = new THREE.SpotLightHelper( spotLight ); * scene.add( spotLightHelper ); * ``` * * @augments Object3D */ class SpotLightHelper extends Object3D { /** * Constructs a new spot light helper. * * @param {SpotLight} light - The light to be visualized. * @param {number|Color|string} [color] - The helper's color. If not set, the helper will take * the color of the light. */ constructor( light, color ) { super(); /** * The light being visualized. * * @type {SpotLight} */ this.light = light; this.matrixAutoUpdate = false; /** * The color parameter passed in the constructor. * If not set, the helper will take the color of the light. * * @type {number|Color|string} */ this.color = color; this.type = 'SpotLightHelper'; const geometry = new BufferGeometry(); const positions = [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, -1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, -1, 1 ]; for ( let i = 0, j = 1, l = 32; i < l; i ++, j ++ ) { const p1 = ( i / l ) * Math.PI * 2; const p2 = ( j / l ) * Math.PI * 2; positions.push( Math.cos( p1 ), Math.sin( p1 ), 1, Math.cos( p2 ), Math.sin( p2 ), 1 ); } geometry.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) ); const material = new LineBasicMaterial( { fog: false, toneMapped: false } ); this.cone = new LineSegments( geometry, material ); this.add( this.cone ); this.update(); } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.cone.geometry.dispose(); this.cone.material.dispose(); } /** * Updates the helper to match the position and direction of the * light being visualized.
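* 
* For example (a sketch reusing the `spotLight` and `spotLightHelper` objects from the class example above),
* call this after changing the light:
* ```js
* spotLight.angle = Math.PI / 5;
* spotLightHelper.update(); // resizes the helper cone to match the new angle
* ```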
*/ update() { this.light.updateWorldMatrix( true, false ); this.light.target.updateWorldMatrix( true, false ); // update the local matrix based on the parent and light target transforms if ( this.parent ) { this.parent.updateWorldMatrix( true ); this.matrix .copy( this.parent.matrixWorld ) .invert() .multiply( this.light.matrixWorld ); } else { this.matrix.copy( this.light.matrixWorld ); } this.matrixWorld.copy( this.light.matrixWorld ); const coneLength = this.light.distance ? this.light.distance : 1000; const coneWidth = coneLength * Math.tan( this.light.angle ); this.cone.scale.set( coneWidth, coneWidth, coneLength ); _vector$3.setFromMatrixPosition( this.light.target.matrixWorld ); this.cone.lookAt( _vector$3 ); if ( this.color !== undefined ) { this.cone.material.color.set( this.color ); } else { this.cone.material.color.copy( this.light.color ); } } } const _vector$2 = /*@__PURE__*/ new Vector3(); const _boneMatrix = /*@__PURE__*/ new Matrix4(); const _matrixWorldInv = /*@__PURE__*/ new Matrix4(); /** * A helper object to assist with visualizing a {@link Skeleton}. * * ```js * const helper = new THREE.SkeletonHelper( skinnedMesh ); * scene.add( helper ); * ``` * * @augments LineSegments */ class SkeletonHelper extends LineSegments { /** * Constructs a new skeleton helper. * * @param {Object3D} object - Usually an instance of {@link SkinnedMesh}. However, any 3D object * can be used if it represents a hierarchy of bones (see {@link Bone}). */ constructor( object ) { const bones = getBoneList( object ); const geometry = new BufferGeometry(); const vertices = []; const colors = []; const color1 = new Color( 0, 0, 1 ); const color2 = new Color( 0, 1, 0 ); for ( let i = 0; i < bones.length; i ++ ) { const bone = bones[ i ]; if ( bone.parent && bone.parent.isBone ) { vertices.push( 0, 0, 0 ); vertices.push( 0, 0, 0 ); colors.push( color1.r, color1.g, color1.b ); colors.push( color2.r, color2.g, color2.b ); } } geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) ); const material = new LineBasicMaterial( { vertexColors: true, depthTest: false, depthWrite: false, toneMapped: false, transparent: true } ); super( geometry, material ); /** * This flag can be used for type testing. * * @type {boolean} * @readonly * @default true */ this.isSkeletonHelper = true; this.type = 'SkeletonHelper'; /** * The object being visualized. * * @type {Object3D} */ this.root = object; /** * The list of bones that the helper visualizes. * * @type {Array} */ this.bones = bones; this.matrix = object.matrixWorld; this.matrixAutoUpdate = false; } updateMatrixWorld( force ) { const bones = this.bones; const geometry = this.geometry; const position = geometry.getAttribute( 'position' ); _matrixWorldInv.copy( this.root.matrixWorld ).invert(); for ( let i = 0, j = 0; i < bones.length; i ++ ) { const bone = bones[ i ]; if ( bone.parent && bone.parent.isBone ) { _boneMatrix.multiplyMatrices( _matrixWorldInv, bone.matrixWorld ); _vector$2.setFromMatrixPosition( _boneMatrix ); position.setXYZ( j, _vector$2.x, _vector$2.y, _vector$2.z ); _boneMatrix.multiplyMatrices( _matrixWorldInv, bone.parent.matrixWorld ); _vector$2.setFromMatrixPosition( _boneMatrix ); position.setXYZ( j + 1, _vector$2.x, _vector$2.y, _vector$2.z ); j += 2; } } geometry.getAttribute( 'position' ).needsUpdate = true; super.updateMatrixWorld( force ); } /** * Frees the GPU-related resources allocated by this instance.
Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } function getBoneList( object ) { const boneList = []; if ( object.isBone === true ) { boneList.push( object ); } for ( let i = 0; i < object.children.length; i ++ ) { boneList.push( ...getBoneList( object.children[ i ] ) ); } return boneList; } /** * This displays a helper object consisting of a spherical mesh for * visualizing an instance of {@link PointLight}. * * ```js * const pointLight = new THREE.PointLight( 0xff0000, 1, 100 ); * pointLight.position.set( 10, 10, 10 ); * scene.add( pointLight ); * * const sphereSize = 1; * const pointLightHelper = new THREE.PointLightHelper( pointLight, sphereSize ); * scene.add( pointLightHelper ); * ``` * * @augments Mesh */ class PointLightHelper extends Mesh { /** * Constructs a new point light helper. * * @param {PointLight} light - The light to be visualized. * @param {number} [sphereSize=1] - The size of the sphere helper. * @param {number|Color|string} [color] - The helper's color. If not set, the helper will take * the color of the light. */ constructor( light, sphereSize, color ) { const geometry = new SphereGeometry( sphereSize, 4, 2 ); const material = new MeshBasicMaterial( { wireframe: true, fog: false, toneMapped: false } ); super( geometry, material ); /** * The light being visualized. * * @type {PointLight} */ this.light = light; /** * The color parameter passed in the constructor. * If not set, the helper will take the color of the light. * * @type {number|Color|string} */ this.color = color; this.type = 'PointLightHelper'; this.matrix = this.light.matrixWorld; this.matrixAutoUpdate = false; this.update(); /* // TODO: delete this comment? const distanceGeometry = new THREE.IcosahedronGeometry( 1, 2 ); const distanceMaterial = new THREE.MeshBasicMaterial( { color: hexColor, fog: false, wireframe: true, opacity: 0.1, transparent: true } ); this.lightSphere = new THREE.Mesh( bulbGeometry, bulbMaterial ); this.lightDistance = new THREE.Mesh( distanceGeometry, distanceMaterial ); const d = light.distance; if ( d === 0.0 ) { this.lightDistance.visible = false; } else { this.lightDistance.scale.set( d, d, d ); } this.add( this.lightDistance ); */ } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } /** * Updates the helper to match the position of the * light being visualized. */ update() { this.light.updateWorldMatrix( true, false ); if ( this.color !== undefined ) { this.material.color.set( this.color ); } else { this.material.color.copy( this.light.color ); } /* const d = this.light.distance; if ( d === 0.0 ) { this.lightDistance.visible = false; } else { this.lightDistance.visible = true; this.lightDistance.scale.set( d, d, d ); } */ } } const _vector$1 = /*@__PURE__*/ new Vector3(); const _color1 = /*@__PURE__*/ new Color(); const _color2 = /*@__PURE__*/ new Color(); /** * Creates a visual aid consisting of a spherical mesh for a * given {@link HemisphereLight}. * * ```js * const light = new THREE.HemisphereLight( 0xffffbb, 0x080820, 1 ); * const helper = new THREE.HemisphereLightHelper( light, 5 ); * scene.add( helper ); * ``` * * @augments Object3D */ class HemisphereLightHelper extends Object3D { /** * Constructs a new hemisphere light helper. * * @param {HemisphereLight} light - The light to be visualized.
* @param {number} [size=1] - The size of the mesh used to visualize the light. * @param {number|Color|string} [color] - The helper's color. If not set, the helper will take * the color of the light. */ constructor( light, size, color ) { super(); /** * The light being visualized. * * @type {HemisphereLight} */ this.light = light; this.matrix = light.matrixWorld; this.matrixAutoUpdate = false; /** * The color parameter passed in the constructor. * If not set, the helper will take the color of the light. * * @type {number|Color|string} */ this.color = color; this.type = 'HemisphereLightHelper'; const geometry = new OctahedronGeometry( size ); geometry.rotateY( Math.PI * 0.5 ); this.material = new MeshBasicMaterial( { wireframe: true, fog: false, toneMapped: false } ); if ( this.color === undefined ) this.material.vertexColors = true; const position = geometry.getAttribute( 'position' ); const colors = new Float32Array( position.count * 3 ); geometry.setAttribute( 'color', new BufferAttribute( colors, 3 ) ); this.add( new Mesh( geometry, this.material ) ); this.update(); } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.children[ 0 ].geometry.dispose(); this.children[ 0 ].material.dispose(); } /** * Updates the helper to match the position and direction of the * light being visualized. */ update() { const mesh = this.children[ 0 ]; if ( this.color !== undefined ) { this.material.color.set( this.color ); } else { const colors = mesh.geometry.getAttribute( 'color' ); _color1.copy( this.light.color ); _color2.copy( this.light.groundColor ); for ( let i = 0, l = colors.count; i < l; i ++ ) { const color = ( i < ( l / 2 ) ) ? _color1 : _color2; colors.setXYZ( i, color.r, color.g, color.b ); } colors.needsUpdate = true; } this.light.updateWorldMatrix( true, false ); mesh.lookAt( _vector$1.setFromMatrixPosition( this.light.matrixWorld ).negate() ); } } /** * The helper is an object to define grids. Grids are two-dimensional * arrays of lines. * * ```js * const size = 10; * const divisions = 10; * * const gridHelper = new THREE.GridHelper( size, divisions ); * scene.add( gridHelper ); * ``` * * @augments LineSegments */ class GridHelper extends LineSegments { /** * Constructs a new grid helper. * * @param {number} [size=10] - The size of the grid. * @param {number} [divisions=10] - The number of divisions across the grid. * @param {number|Color|string} [color1=0x444444] - The color of the center line. * @param {number|Color|string} [color2=0x888888] - The color of the lines of the grid. */ constructor( size = 10, divisions = 10, color1 = 0x444444, color2 = 0x888888 ) { color1 = new Color( color1 ); color2 = new Color( color2 ); const center = divisions / 2; const step = size / divisions; const halfSize = size / 2; const vertices = [], colors = []; for ( let i = 0, j = 0, k = - halfSize; i <= divisions; i ++, k += step ) { vertices.push( - halfSize, 0, k, halfSize, 0, k ); vertices.push( k, 0, - halfSize, k, 0, halfSize ); const color = i === center ? 
color1 : color2; color.toArray( colors, j ); j += 3; color.toArray( colors, j ); j += 3; color.toArray( colors, j ); j += 3; color.toArray( colors, j ); j += 3; } const geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) ); const material = new LineBasicMaterial( { vertexColors: true, toneMapped: false } ); super( geometry, material ); this.type = 'GridHelper'; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } /** * This helper is an object to define polar grids. Grids are * two-dimensional arrays of lines. * * ```js * const radius = 10; * const sectors = 16; * const rings = 8; * const divisions = 64; * * const helper = new THREE.PolarGridHelper( radius, sectors, rings, divisions ); * scene.add( helper ); * ``` * * @augments LineSegments */ class PolarGridHelper extends LineSegments { /** * Constructs a new polar grid helper. * * @param {number} [radius=10] - The radius of the polar grid. This can be any positive number. * @param {number} [sectors=16] - The number of sectors the grid will be divided into. This can be any positive integer. * @param {number} [rings=8] - The number of rings. This can be any positive integer. * @param {number} [divisions=64] - The number of line segments used for each circle. This can be any positive integer. * @param {number|Color|string} [color1=0x444444] - The first color used for grid elements. * @param {number|Color|string} [color2=0x888888] - The second color used for grid elements. */ constructor( radius = 10, sectors = 16, rings = 8, divisions = 64, color1 = 0x444444, color2 = 0x888888 ) { color1 = new Color( color1 ); color2 = new Color( color2 ); const vertices = []; const colors = []; // create the sectors if ( sectors > 1 ) { for ( let i = 0; i < sectors; i ++ ) { const v = ( i / sectors ) * ( Math.PI * 2 ); const x = Math.sin( v ) * radius; const z = Math.cos( v ) * radius; vertices.push( 0, 0, 0 ); vertices.push( x, 0, z ); const color = ( i & 1 ) ? color1 : color2; colors.push( color.r, color.g, color.b ); colors.push( color.r, color.g, color.b ); } } // create the rings for ( let i = 0; i < rings; i ++ ) { const color = ( i & 1 ) ? color1 : color2; const r = radius - ( radius / rings * i ); for ( let j = 0; j < divisions; j ++ ) { // first vertex let v = ( j / divisions ) * ( Math.PI * 2 ); let x = Math.sin( v ) * r; let z = Math.cos( v ) * r; vertices.push( x, 0, z ); colors.push( color.r, color.g, color.b ); // second vertex v = ( ( j + 1 ) / divisions ) * ( Math.PI * 2 ); x = Math.sin( v ) * r; z = Math.cos( v ) * r; vertices.push( x, 0, z ); colors.push( color.r, color.g, color.b ); } } const geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) ); const material = new LineBasicMaterial( { vertexColors: true, toneMapped: false } ); super( geometry, material ); this.type = 'PolarGridHelper'; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app.
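* 
* A typical cleanup sketch, assuming the helper was added to `scene` as in the class example above:
* ```js
* scene.remove( helper );
* helper.dispose();
* ```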
*/ dispose() { this.geometry.dispose(); this.material.dispose(); } } const _v1 = /*@__PURE__*/ new Vector3(); const _v2 = /*@__PURE__*/ new Vector3(); const _v3 = /*@__PURE__*/ new Vector3(); /** * Helper object to assist with visualizing a {@link DirectionalLight}'s * effect on the scene. This consists of plane and a line representing the * light's position and direction. * * ```js * const light = new THREE.DirectionalLight( 0xFFFFFF ); * scene.add( light ); * * const helper = new THREE.DirectionalLightHelper( light, 5 ); * scene.add( helper ); * ``` * * @augments Object3D */ class DirectionalLightHelper extends Object3D { /** * Constructs a new directional light helper. * * @param {DirectionalLight} light - The light to be visualized. * @param {number} [size=1] - The dimensions of the plane. * @param {number|Color|string} [color] - The helper's color. If not set, the helper will take * the color of the light. */ constructor( light, size, color ) { super(); /** * The light being visualized. * * @type {DirectionalLight} */ this.light = light; this.matrix = light.matrixWorld; this.matrixAutoUpdate = false; /** * The color parameter passed in the constructor. * If not set, the helper will take the color of the light. * * @type {number|Color|string} */ this.color = color; this.type = 'DirectionalLightHelper'; if ( size === undefined ) size = 1; let geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( [ - size, size, 0, size, size, 0, size, - size, 0, - size, - size, 0, - size, size, 0 ], 3 ) ); const material = new LineBasicMaterial( { fog: false, toneMapped: false } ); /** * Contains the line showing the location of the directional light. * * @type {Line} */ this.lightPlane = new Line( geometry, material ); this.add( this.lightPlane ); geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( [ 0, 0, 0, 0, 0, 1 ], 3 ) ); /** * Represents the target line of the directional light. * * @type {Line} */ this.targetLine = new Line( geometry, material ); this.add( this.targetLine ); this.update(); } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.lightPlane.geometry.dispose(); this.lightPlane.material.dispose(); this.targetLine.geometry.dispose(); this.targetLine.material.dispose(); } /** * Updates the helper to match the position and direction of the * light being visualized. */ update() { this.light.updateWorldMatrix( true, false ); this.light.target.updateWorldMatrix( true, false ); _v1.setFromMatrixPosition( this.light.matrixWorld ); _v2.setFromMatrixPosition( this.light.target.matrixWorld ); _v3.subVectors( _v2, _v1 ); this.lightPlane.lookAt( _v2 ); if ( this.color !== undefined ) { this.lightPlane.material.color.set( this.color ); this.targetLine.material.color.set( this.color ); } else { this.lightPlane.material.color.copy( this.light.color ); this.targetLine.material.color.copy( this.light.color ); } this.targetLine.lookAt( _v2 ); this.targetLine.scale.z = _v3.length(); } } const _vector = /*@__PURE__*/ new Vector3(); const _camera = /*@__PURE__*/ new Camera(); /** * This helps with visualizing what a camera contains in its frustum. It * visualizes the frustum of a camera using a line segments. * * Based on frustum visualization in [lightgl.js shadowmap example]{@link https://github.com/evanw/lightgl.js/blob/master/tests/shadowmap.html}. * * `CameraHelper` must be a child of the scene. 
* * ```js * const camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 ); * const helper = new THREE.CameraHelper( camera ); * scene.add( helper ); * ``` * * @augments LineSegments */ class CameraHelper extends LineSegments { /** * Constructs a new camera helper. * * @param {Camera} camera - The camera to visualize. */ constructor( camera ) { const geometry = new BufferGeometry(); const material = new LineBasicMaterial( { color: 0xffffff, vertexColors: true, toneMapped: false } ); const vertices = []; const colors = []; const pointMap = {}; // near addLine( 'n1', 'n2' ); addLine( 'n2', 'n4' ); addLine( 'n4', 'n3' ); addLine( 'n3', 'n1' ); // far addLine( 'f1', 'f2' ); addLine( 'f2', 'f4' ); addLine( 'f4', 'f3' ); addLine( 'f3', 'f1' ); // sides addLine( 'n1', 'f1' ); addLine( 'n2', 'f2' ); addLine( 'n3', 'f3' ); addLine( 'n4', 'f4' ); // cone addLine( 'p', 'n1' ); addLine( 'p', 'n2' ); addLine( 'p', 'n3' ); addLine( 'p', 'n4' ); // up addLine( 'u1', 'u2' ); addLine( 'u2', 'u3' ); addLine( 'u3', 'u1' ); // target addLine( 'c', 't' ); addLine( 'p', 'c' ); // cross addLine( 'cn1', 'cn2' ); addLine( 'cn3', 'cn4' ); addLine( 'cf1', 'cf2' ); addLine( 'cf3', 'cf4' ); function addLine( a, b ) { addPoint( a ); addPoint( b ); } function addPoint( id ) { vertices.push( 0, 0, 0 ); colors.push( 0, 0, 0 ); if ( pointMap[ id ] === undefined ) { pointMap[ id ] = []; } pointMap[ id ].push( ( vertices.length / 3 ) - 1 ); } geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) ); super( geometry, material ); this.type = 'CameraHelper'; /** * The camera being visualized. * * @type {Camera} */ this.camera = camera; if ( this.camera.updateProjectionMatrix ) this.camera.updateProjectionMatrix(); this.matrix = camera.matrixWorld; this.matrixAutoUpdate = false; /** * This contains the points used to visualize the camera. * * @type {Object<string,Array<number>>} */ this.pointMap = pointMap; this.update(); // colors const colorFrustum = new Color( 0xffaa00 ); const colorCone = new Color( 0xff0000 ); const colorUp = new Color( 0x00aaff ); const colorTarget = new Color( 0xffffff ); const colorCross = new Color( 0x333333 ); this.setColors( colorFrustum, colorCone, colorUp, colorTarget, colorCross ); } /** * Defines the colors of the helper. * * @param {Color} frustum - The frustum line color. * @param {Color} cone - The cone line color. * @param {Color} up - The up line color. * @param {Color} target - The target line color. * @param {Color} cross - The cross line color.
*/ setColors( frustum, cone, up, target, cross ) { const geometry = this.geometry; const colorAttribute = geometry.getAttribute( 'color' ); // near colorAttribute.setXYZ( 0, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 1, frustum.r, frustum.g, frustum.b ); // n1, n2 colorAttribute.setXYZ( 2, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 3, frustum.r, frustum.g, frustum.b ); // n2, n4 colorAttribute.setXYZ( 4, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 5, frustum.r, frustum.g, frustum.b ); // n4, n3 colorAttribute.setXYZ( 6, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 7, frustum.r, frustum.g, frustum.b ); // n3, n1 // far colorAttribute.setXYZ( 8, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 9, frustum.r, frustum.g, frustum.b ); // f1, f2 colorAttribute.setXYZ( 10, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 11, frustum.r, frustum.g, frustum.b ); // f2, f4 colorAttribute.setXYZ( 12, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 13, frustum.r, frustum.g, frustum.b ); // f4, f3 colorAttribute.setXYZ( 14, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 15, frustum.r, frustum.g, frustum.b ); // f3, f1 // sides colorAttribute.setXYZ( 16, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 17, frustum.r, frustum.g, frustum.b ); // n1, f1 colorAttribute.setXYZ( 18, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 19, frustum.r, frustum.g, frustum.b ); // n2, f2 colorAttribute.setXYZ( 20, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 21, frustum.r, frustum.g, frustum.b ); // n3, f3 colorAttribute.setXYZ( 22, frustum.r, frustum.g, frustum.b ); colorAttribute.setXYZ( 23, frustum.r, frustum.g, frustum.b ); // n4, f4 // cone colorAttribute.setXYZ( 24, cone.r, cone.g, cone.b ); colorAttribute.setXYZ( 25, cone.r, cone.g, cone.b ); // p, n1 colorAttribute.setXYZ( 26, cone.r, cone.g, cone.b ); colorAttribute.setXYZ( 27, cone.r, cone.g, cone.b ); // p, n2 colorAttribute.setXYZ( 28, cone.r, cone.g, cone.b ); colorAttribute.setXYZ( 29, cone.r, cone.g, cone.b ); // p, n3 colorAttribute.setXYZ( 30, cone.r, cone.g, cone.b ); colorAttribute.setXYZ( 31, cone.r, cone.g, cone.b ); // p, n4 // up colorAttribute.setXYZ( 32, up.r, up.g, up.b ); colorAttribute.setXYZ( 33, up.r, up.g, up.b ); // u1, u2 colorAttribute.setXYZ( 34, up.r, up.g, up.b ); colorAttribute.setXYZ( 35, up.r, up.g, up.b ); // u2, u3 colorAttribute.setXYZ( 36, up.r, up.g, up.b ); colorAttribute.setXYZ( 37, up.r, up.g, up.b ); // u3, u1 // target colorAttribute.setXYZ( 38, target.r, target.g, target.b ); colorAttribute.setXYZ( 39, target.r, target.g, target.b ); // c, t colorAttribute.setXYZ( 40, cross.r, cross.g, cross.b ); colorAttribute.setXYZ( 41, cross.r, cross.g, cross.b ); // p, c // cross colorAttribute.setXYZ( 42, cross.r, cross.g, cross.b ); colorAttribute.setXYZ( 43, cross.r, cross.g, cross.b ); // cn1, cn2 colorAttribute.setXYZ( 44, cross.r, cross.g, cross.b ); colorAttribute.setXYZ( 45, cross.r, cross.g, cross.b ); // cn3, cn4 colorAttribute.setXYZ( 46, cross.r, cross.g, cross.b ); colorAttribute.setXYZ( 47, cross.r, cross.g, cross.b ); // cf1, cf2 colorAttribute.setXYZ( 48, cross.r, cross.g, cross.b ); colorAttribute.setXYZ( 49, cross.r, cross.g, cross.b ); // cf3, cf4 colorAttribute.needsUpdate = true; } /** * Updates the helper based on the projection matrix of the camera. 
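* 
* For example (a sketch reusing the `camera` and `helper` objects from the class example above),
* call this after changing camera parameters:
* ```js
* camera.fov = 50;
* camera.updateProjectionMatrix();
* helper.update(); // rebuilds the frustum lines from the updated projection matrix
* ```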
*/ update() { const geometry = this.geometry; const pointMap = this.pointMap; const w = 1, h = 1; // we need just camera projection matrix inverse // world matrix must be identity _camera.projectionMatrixInverse.copy( this.camera.projectionMatrixInverse ); // Adjust z values based on coordinate system const nearZ = this.camera.coordinateSystem === WebGLCoordinateSystem ? -1 : 0; // center / target setPoint( 'c', pointMap, geometry, _camera, 0, 0, nearZ ); setPoint( 't', pointMap, geometry, _camera, 0, 0, 1 ); // near setPoint( 'n1', pointMap, geometry, _camera, -1, -1, nearZ ); setPoint( 'n2', pointMap, geometry, _camera, w, -1, nearZ ); setPoint( 'n3', pointMap, geometry, _camera, -1, h, nearZ ); setPoint( 'n4', pointMap, geometry, _camera, w, h, nearZ ); // far setPoint( 'f1', pointMap, geometry, _camera, -1, -1, 1 ); setPoint( 'f2', pointMap, geometry, _camera, w, -1, 1 ); setPoint( 'f3', pointMap, geometry, _camera, -1, h, 1 ); setPoint( 'f4', pointMap, geometry, _camera, w, h, 1 ); // up setPoint( 'u1', pointMap, geometry, _camera, w * 0.7, h * 1.1, nearZ ); setPoint( 'u2', pointMap, geometry, _camera, -1 * 0.7, h * 1.1, nearZ ); setPoint( 'u3', pointMap, geometry, _camera, 0, h * 2, nearZ ); // cross setPoint( 'cf1', pointMap, geometry, _camera, -1, 0, 1 ); setPoint( 'cf2', pointMap, geometry, _camera, w, 0, 1 ); setPoint( 'cf3', pointMap, geometry, _camera, 0, -1, 1 ); setPoint( 'cf4', pointMap, geometry, _camera, 0, h, 1 ); setPoint( 'cn1', pointMap, geometry, _camera, -1, 0, nearZ ); setPoint( 'cn2', pointMap, geometry, _camera, w, 0, nearZ ); setPoint( 'cn3', pointMap, geometry, _camera, 0, -1, nearZ ); setPoint( 'cn4', pointMap, geometry, _camera, 0, h, nearZ ); geometry.getAttribute( 'position' ).needsUpdate = true; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } function setPoint( point, pointMap, geometry, camera, x, y, z ) { _vector.set( x, y, z ).unproject( camera ); const points = pointMap[ point ]; if ( points !== undefined ) { const position = geometry.getAttribute( 'position' ); for ( let i = 0, l = points.length; i < l; i ++ ) { position.setXYZ( points[ i ], _vector.x, _vector.y, _vector.z ); } } } const _box = /*@__PURE__*/ new Box3(); /** * Helper object to graphically show the world-axis-aligned bounding box * around an object. The actual bounding box is handled with {@link Box3}, * this is just a visual helper for debugging. It can be automatically * resized with {@link BoxHelper#update} when the object it's created from * is transformed. Note that the object must have a geometry for this to work, * so it won't work with sprites. * * ```js * const sphere = new THREE.SphereGeometry(); * const object = new THREE.Mesh( sphere, new THREE.MeshBasicMaterial( 0xff0000 ) ); * const box = new THREE.BoxHelper( object, 0xffff00 ); * scene.add( box ); * ``` * * @augments LineSegments */ class BoxHelper extends LineSegments { /** * Constructs a new box helper. * * @param {Object3D} [object] - The 3D object to show the world-axis-aligned bounding box. * @param {number|Color|string} [color=0xffff00] - The box's color. 
*/ constructor( object, color = 0xffff00 ) { const indices = new Uint16Array( [ 0, 1, 1, 2, 2, 3, 3, 0, 4, 5, 5, 6, 6, 7, 7, 4, 0, 4, 1, 5, 2, 6, 3, 7 ] ); const positions = new Float32Array( 8 * 3 ); const geometry = new BufferGeometry(); geometry.setIndex( new BufferAttribute( indices, 1 ) ); geometry.setAttribute( 'position', new BufferAttribute( positions, 3 ) ); super( geometry, new LineBasicMaterial( { color: color, toneMapped: false } ) ); /** * The 3D object being visualized. * * @type {Object3D} */ this.object = object; this.type = 'BoxHelper'; this.matrixAutoUpdate = false; this.update(); } /** * Updates the helper's geometry to match the dimensions of the object, * including any children. */ update() { if ( this.object !== undefined ) { _box.setFromObject( this.object ); } if ( _box.isEmpty() ) return; const min = _box.min; const max = _box.max; /* 5____4 1/___0/| | 6__|_7 2/___3/ 0: max.x, max.y, max.z 1: min.x, max.y, max.z 2: min.x, min.y, max.z 3: max.x, min.y, max.z 4: max.x, max.y, min.z 5: min.x, max.y, min.z 6: min.x, min.y, min.z 7: max.x, min.y, min.z */ const position = this.geometry.attributes.position; const array = position.array; array[ 0 ] = max.x; array[ 1 ] = max.y; array[ 2 ] = max.z; array[ 3 ] = min.x; array[ 4 ] = max.y; array[ 5 ] = max.z; array[ 6 ] = min.x; array[ 7 ] = min.y; array[ 8 ] = max.z; array[ 9 ] = max.x; array[ 10 ] = min.y; array[ 11 ] = max.z; array[ 12 ] = max.x; array[ 13 ] = max.y; array[ 14 ] = min.z; array[ 15 ] = min.x; array[ 16 ] = max.y; array[ 17 ] = min.z; array[ 18 ] = min.x; array[ 19 ] = min.y; array[ 20 ] = min.z; array[ 21 ] = max.x; array[ 22 ] = min.y; array[ 23 ] = min.z; position.needsUpdate = true; this.geometry.computeBoundingSphere(); } /** * Updates the wireframe box for the passed object. * * @param {Object3D} object - The 3D object to create the helper for. * @return {BoxHelper} A reference to this instance. */ setFromObject( object ) { this.object = object; this.update(); return this; } copy( source, recursive ) { super.copy( source, recursive ); this.object = source.object; return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } /** * A helper object to visualize an instance of {@link Box3}. * * ```js * const box = new THREE.Box3(); * box.setFromCenterAndSize( new THREE.Vector3( 1, 1, 1 ), new THREE.Vector3( 2, 1, 3 ) ); * * const helper = new THREE.Box3Helper( box, 0xffff00 ); * scene.add( helper ) * ``` * * @augments LineSegments */ class Box3Helper extends LineSegments { /** * Constructs a new box3 helper. * * @param {Box3} box - The box to visualize. * @param {number|Color|string} [color=0xffff00] - The box's color. */ constructor( box, color = 0xffff00 ) { const indices = new Uint16Array( [ 0, 1, 1, 2, 2, 3, 3, 0, 4, 5, 5, 6, 6, 7, 7, 4, 0, 4, 1, 5, 2, 6, 3, 7 ] ); const positions = [ 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1 ]; const geometry = new BufferGeometry(); geometry.setIndex( new BufferAttribute( indices, 1 ) ); geometry.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) ); super( geometry, new LineBasicMaterial( { color: color, toneMapped: false } ) ); /** * The box being visualized. 
* * @type {Box3} */ this.box = box; this.type = 'Box3Helper'; this.geometry.computeBoundingSphere(); } updateMatrixWorld( force ) { const box = this.box; if ( box.isEmpty() ) return; box.getCenter( this.position ); box.getSize( this.scale ); this.scale.multiplyScalar( 0.5 ); super.updateMatrixWorld( force ); } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } /** * A helper object to visualize an instance of {@link Plane}. * * ```js * const plane = new THREE.Plane( new THREE.Vector3( 1, 1, 0.2 ), 3 ); * const helper = new THREE.PlaneHelper( plane, 1, 0xffff00 ); * scene.add( helper ); * ``` * * @augments Line */ class PlaneHelper extends Line { /** * Constructs a new plane helper. * * @param {Plane} plane - The plane to be visualized. * @param {number} [size=1] - The side length of the plane helper. * @param {number|Color|string} [hex=0xffff00] - The helper's color. */ constructor( plane, size = 1, hex = 0xffff00 ) { const color = hex; const positions = [ 1, -1, 0, -1, 1, 0, -1, -1, 0, 1, 1, 0, -1, 1, 0, -1, -1, 0, 1, -1, 0, 1, 1, 0 ]; const geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( positions, 3 ) ); geometry.computeBoundingSphere(); super( geometry, new LineBasicMaterial( { color: color, toneMapped: false } ) ); this.type = 'PlaneHelper'; /** * The plane being visualized. * * @type {Plane} */ this.plane = plane; /** * The side length of the plane helper. * * @type {number} * @default 1 */ this.size = size; const positions2 = [ 1, 1, 0, -1, 1, 0, -1, -1, 0, 1, 1, 0, -1, -1, 0, 1, -1, 0 ]; const geometry2 = new BufferGeometry(); geometry2.setAttribute( 'position', new Float32BufferAttribute( positions2, 3 ) ); geometry2.computeBoundingSphere(); this.add( new Mesh( geometry2, new MeshBasicMaterial( { color: color, opacity: 0.2, transparent: true, depthWrite: false, toneMapped: false } ) ) ); } updateMatrixWorld( force ) { this.position.set( 0, 0, 0 ); this.scale.set( 0.5 * this.size, 0.5 * this.size, 1 ); this.lookAt( this.plane.normal ); this.translateZ( - this.plane.constant ); super.updateMatrixWorld( force ); } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); this.children[ 0 ].geometry.dispose(); this.children[ 0 ].material.dispose(); } } const _axis = /*@__PURE__*/ new Vector3(); let _lineGeometry, _coneGeometry; /** * A 3D arrow object for visualizing directions. * * ```js * const dir = new THREE.Vector3( 1, 2, 0 ); * * //normalize the direction vector (convert to vector of length 1) * dir.normalize(); * * const origin = new THREE.Vector3( 0, 0, 0 ); * const length = 1; * const hex = 0xffff00; * * const arrowHelper = new THREE.ArrowHelper( dir, origin, length, hex ); * scene.add( arrowHelper ); * ``` * * @augments Object3D */ class ArrowHelper extends Object3D { /** * Constructs a new arrow helper. * * @param {Vector3} [dir=(0, 0, 1)] - The (normalized) direction vector. * @param {Vector3} [origin=(0, 0, 0)] - Point at which the arrow starts. * @param {number} [length=1] - Length of the arrow in world units. * @param {(number|Color|string)} [color=0xffff00] - Color of the arrow. * @param {number} [headLength=length*0.2] - The length of the head of the arrow. * @param {number} [headWidth=headLength*0.2] - The width of the head of the arrow.
*/ constructor( dir = new Vector3( 0, 0, 1 ), origin = new Vector3( 0, 0, 0 ), length = 1, color = 0xffff00, headLength = length * 0.2, headWidth = headLength * 0.2 ) { super(); this.type = 'ArrowHelper'; if ( _lineGeometry === undefined ) { _lineGeometry = new BufferGeometry(); _lineGeometry.setAttribute( 'position', new Float32BufferAttribute( [ 0, 0, 0, 0, 1, 0 ], 3 ) ); _coneGeometry = new CylinderGeometry( 0, 0.5, 1, 5, 1 ); _coneGeometry.translate( 0, -0.5, 0 ); } this.position.copy( origin ); /** * The line part of the arrow helper. * * @type {Line} */ this.line = new Line( _lineGeometry, new LineBasicMaterial( { color: color, toneMapped: false } ) ); this.line.matrixAutoUpdate = false; this.add( this.line ); /** * The cone part of the arrow helper. * * @type {Mesh} */ this.cone = new Mesh( _coneGeometry, new MeshBasicMaterial( { color: color, toneMapped: false } ) ); this.cone.matrixAutoUpdate = false; this.add( this.cone ); this.setDirection( dir ); this.setLength( length, headLength, headWidth ); } /** * Sets the direction of the helper. * * @param {Vector3} dir - The normalized direction vector. */ setDirection( dir ) { // dir is assumed to be normalized if ( dir.y > 0.99999 ) { this.quaternion.set( 0, 0, 0, 1 ); } else if ( dir.y < -0.99999 ) { this.quaternion.set( 1, 0, 0, 0 ); } else { _axis.set( dir.z, 0, - dir.x ).normalize(); const radians = Math.acos( dir.y ); this.quaternion.setFromAxisAngle( _axis, radians ); } } /** * Sets the length of the helper. * * @param {number} length - Length of the arrow in world units. * @param {number} [headLength=length*0.2] - The length of the head of the arrow. * @param {number} [headWidth=headLength*0.2] - The width of the head of the arrow. */ setLength( length, headLength = length * 0.2, headWidth = headLength * 0.2 ) { this.line.scale.set( 1, Math.max( 0.0001, length - headLength ), 1 ); // see #17458 this.line.updateMatrix(); this.cone.scale.set( headWidth, headLength, headWidth ); this.cone.position.y = length; this.cone.updateMatrix(); } /** * Sets the color of the helper. * * @param {number|Color|string} color - The color to set. */ setColor( color ) { this.line.material.color.set( color ); this.cone.material.color.set( color ); } copy( source ) { super.copy( source, false ); this.line.copy( source.line ); this.cone.copy( source.cone ); return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.line.geometry.dispose(); this.line.material.dispose(); this.cone.geometry.dispose(); this.cone.material.dispose(); } } /** * An axis object to visualize the 3 axes in a simple way. * The X axis is red. The Y axis is green. The Z axis is blue. * * ```js * const axesHelper = new THREE.AxesHelper( 5 ); * scene.add( axesHelper ); * ``` * * @augments LineSegments */ class AxesHelper extends LineSegments { /** * Constructs a new axes helper. * * @param {number} [size=1] - Size of the lines representing the axes. 
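 *
 * A sketch of overriding the default axis colors afterwards via {@link AxesHelper#setColors}
 * (the colors here are chosen purely for illustration; `axesHelper` refers to the instance
 * from the example above):
 * ```js
 * axesHelper.setColors( 0xff0000, 0x00ff00, 0x0000ff );
 * ```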
*/ constructor( size = 1 ) { const vertices = [ 0, 0, 0, size, 0, 0, 0, 0, 0, 0, size, 0, 0, 0, 0, 0, 0, size ]; const colors = [ 1, 0, 0, 1, 0.6, 0, 0, 1, 0, 0.6, 1, 0, 0, 0, 1, 0, 0.6, 1 ]; const geometry = new BufferGeometry(); geometry.setAttribute( 'position', new Float32BufferAttribute( vertices, 3 ) ); geometry.setAttribute( 'color', new Float32BufferAttribute( colors, 3 ) ); const material = new LineBasicMaterial( { vertexColors: true, toneMapped: false } ); super( geometry, material ); this.type = 'AxesHelper'; } /** * Defines the colors of the axes helper. * * @param {number|Color|string} xAxisColor - The color for the x axis. * @param {number|Color|string} yAxisColor - The color for the y axis. * @param {number|Color|string} zAxisColor - The color for the z axis. * @return {AxesHelper} A reference to this axes helper. */ setColors( xAxisColor, yAxisColor, zAxisColor ) { const color = new Color(); const array = this.geometry.attributes.color.array; color.set( xAxisColor ); color.toArray( array, 0 ); color.toArray( array, 3 ); color.set( yAxisColor ); color.toArray( array, 6 ); color.toArray( array, 9 ); color.set( zAxisColor ); color.toArray( array, 12 ); color.toArray( array, 15 ); this.geometry.attributes.color.needsUpdate = true; return this; } /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ dispose() { this.geometry.dispose(); this.material.dispose(); } } /** * This class is used to convert a series of paths to an array of * shapes. It is specifically used in the context of fonts and SVG. */ class ShapePath { /** * Constructs a new shape path. */ constructor() { this.type = 'ShapePath'; /** * The color of the shape. * * @type {Color} */ this.color = new Color(); /** * The paths that have been generated for this shape. * * @type {Array} * @default [] */ this.subPaths = []; /** * The current path that is being generated. * * @type {?Path} * @default null */ this.currentPath = null; } /** * Creates a new path and moves its current point to the given one. * * @param {number} x - The x coordinate. * @param {number} y - The y coordinate. * @return {ShapePath} A reference to this shape path. */ moveTo( x, y ) { this.currentPath = new Path(); this.subPaths.push( this.currentPath ); this.currentPath.moveTo( x, y ); return this; } /** * Adds an instance of {@link LineCurve} to the path by connecting * the current point with the given one. * * @param {number} x - The x coordinate of the end point. * @param {number} y - The y coordinate of the end point. * @return {ShapePath} A reference to this shape path. */ lineTo( x, y ) { this.currentPath.lineTo( x, y ); return this; } /** * Adds an instance of {@link QuadraticBezierCurve} to the path by connecting * the current point with the given one. * * @param {number} aCPx - The x coordinate of the control point. * @param {number} aCPy - The y coordinate of the control point. * @param {number} aX - The x coordinate of the end point. * @param {number} aY - The y coordinate of the end point. * @return {ShapePath} A reference to this shape path. */ quadraticCurveTo( aCPx, aCPy, aX, aY ) { this.currentPath.quadraticCurveTo( aCPx, aCPy, aX, aY ); return this; } /** * Adds an instance of {@link CubicBezierCurve} to the path by connecting * the current point with the given one. * * @param {number} aCP1x - The x coordinate of the first control point. * @param {number} aCP1y - The y coordinate of the first control point.
* @param {number} aCP2x - The x coordinate of the second control point. * @param {number} aCP2y - The y coordinate of the second control point. * @param {number} aX - The x coordinate of the end point. * @param {number} aY - The y coordinate of the end point. * @return {ShapePath} A reference to this shape path. */ bezierCurveTo( aCP1x, aCP1y, aCP2x, aCP2y, aX, aY ) { this.currentPath.bezierCurveTo( aCP1x, aCP1y, aCP2x, aCP2y, aX, aY ); return this; } /** * Adds an instance of {@link SplineCurve} to the path by connecting * the current point with the given list of points. * * @param {Array} pts - An array of points in 2D space. * @return {ShapePath} A reference to this shape path. */ splineThru( pts ) { this.currentPath.splineThru( pts ); return this; } /** * Converts the paths into an array of shapes. * * @param {boolean} isCCW - By default solid shapes are defined clockwise (CW) and holes are defined counterclockwise (CCW). * If this flag is set to `true`, then those are flipped. * @return {Array} An array of shapes. */ toShapes( isCCW ) { function toShapesNoHoles( inSubpaths ) { const shapes = []; for ( let i = 0, l = inSubpaths.length; i < l; i ++ ) { const tmpPath = inSubpaths[ i ]; const tmpShape = new Shape(); tmpShape.curves = tmpPath.curves; shapes.push( tmpShape ); } return shapes; } function isPointInsidePolygon( inPt, inPolygon ) { const polyLen = inPolygon.length; // inPt on polygon contour => immediate success or // toggling of inside/outside at every single! intersection point of an edge // with the horizontal line through inPt, left of inPt // not counting lowerY endpoints of edges and whole edges on that line let inside = false; for ( let p = polyLen - 1, q = 0; q < polyLen; p = q ++ ) { let edgeLowPt = inPolygon[ p ]; let edgeHighPt = inPolygon[ q ]; let edgeDx = edgeHighPt.x - edgeLowPt.x; let edgeDy = edgeHighPt.y - edgeLowPt.y; if ( Math.abs( edgeDy ) > Number.EPSILON ) { // not parallel if ( edgeDy < 0 ) { edgeLowPt = inPolygon[ q ]; edgeDx = - edgeDx; edgeHighPt = inPolygon[ p ]; edgeDy = - edgeDy; } if ( ( inPt.y < edgeLowPt.y ) || ( inPt.y > edgeHighPt.y ) ) continue; if ( inPt.y === edgeLowPt.y ) { if ( inPt.x === edgeLowPt.x ) return true; // inPt is on contour ? // continue; // no intersection or edgeLowPt => doesn't count !!! } else { const perpEdge = edgeDy * ( inPt.x - edgeLowPt.x ) - edgeDx * ( inPt.y - edgeLowPt.y ); if ( perpEdge === 0 ) return true; // inPt is on contour ? if ( perpEdge < 0 ) continue; inside = ! inside; // true intersection left of inPt } } else { // parallel or collinear if ( inPt.y !== edgeLowPt.y ) continue; // parallel // edge lies on the same horizontal line as inPt if ( ( ( edgeHighPt.x <= inPt.x ) && ( inPt.x <= edgeLowPt.x ) ) || ( ( edgeLowPt.x <= inPt.x ) && ( inPt.x <= edgeHighPt.x ) ) ) return true; // inPt: Point on contour ! // continue; } } return inside; } const isClockWise = ShapeUtils.isClockWise; const subPaths = this.subPaths; if ( subPaths.length === 0 ) return []; let solid, tmpPath, tmpShape; const shapes = []; if ( subPaths.length === 1 ) { tmpPath = subPaths[ 0 ]; tmpShape = new Shape(); tmpShape.curves = tmpPath.curves; shapes.push( tmpShape ); return shapes; } let holesFirst = ! isClockWise( subPaths[ 0 ].getPoints() ); holesFirst = isCCW ? ! 
holesFirst : holesFirst; // console.log("Holes first", holesFirst); const betterShapeHoles = []; const newShapes = []; let newShapeHoles = []; let mainIdx = 0; let tmpPoints; newShapes[ mainIdx ] = undefined; newShapeHoles[ mainIdx ] = []; for ( let i = 0, l = subPaths.length; i < l; i ++ ) { tmpPath = subPaths[ i ]; tmpPoints = tmpPath.getPoints(); solid = isClockWise( tmpPoints ); solid = isCCW ? ! solid : solid; if ( solid ) { if ( ( ! holesFirst ) && ( newShapes[ mainIdx ] ) ) mainIdx ++; newShapes[ mainIdx ] = { s: new Shape(), p: tmpPoints }; newShapes[ mainIdx ].s.curves = tmpPath.curves; if ( holesFirst ) mainIdx ++; newShapeHoles[ mainIdx ] = []; //console.log('cw', i); } else { newShapeHoles[ mainIdx ].push( { h: tmpPath, p: tmpPoints[ 0 ] } ); //console.log('ccw', i); } } // only Holes? -> probably all Shapes with wrong orientation if ( ! newShapes[ 0 ] ) return toShapesNoHoles( subPaths ); if ( newShapes.length > 1 ) { let ambiguous = false; let toChange = 0; for ( let sIdx = 0, sLen = newShapes.length; sIdx < sLen; sIdx ++ ) { betterShapeHoles[ sIdx ] = []; } for ( let sIdx = 0, sLen = newShapes.length; sIdx < sLen; sIdx ++ ) { const sho = newShapeHoles[ sIdx ]; for ( let hIdx = 0; hIdx < sho.length; hIdx ++ ) { const ho = sho[ hIdx ]; let hole_unassigned = true; for ( let s2Idx = 0; s2Idx < newShapes.length; s2Idx ++ ) { if ( isPointInsidePolygon( ho.p, newShapes[ s2Idx ].p ) ) { if ( sIdx !== s2Idx ) toChange ++; if ( hole_unassigned ) { hole_unassigned = false; betterShapeHoles[ s2Idx ].push( ho ); } else { ambiguous = true; } } } if ( hole_unassigned ) { betterShapeHoles[ sIdx ].push( ho ); } } } if ( toChange > 0 && ambiguous === false ) { newShapeHoles = betterShapeHoles; } } let tmpHoles; for ( let i = 0, il = newShapes.length; i < il; i ++ ) { tmpShape = newShapes[ i ].s; shapes.push( tmpShape ); tmpHoles = newShapeHoles[ i ]; for ( let j = 0, jl = tmpHoles.length; j < jl; j ++ ) { tmpShape.holes.push( tmpHoles[ j ].h ); } } //console.log("shape", shapes); return shapes; } } /** * Abstract base class for controls. * * @abstract * @augments EventDispatcher */ class Controls extends EventDispatcher { /** * Constructs a new controls instance. * * @param {Object3D} object - The object that is managed by the controls. * @param {?HTMLDOMElement} domElement - The HTML element used for event listeners. */ constructor( object, domElement = null ) { super(); /** * The object that is managed by the controls. * * @type {Object3D} */ this.object = object; /** * The HTML element used for event listeners. * * @type {?HTMLDOMElement} * @default null */ this.domElement = domElement; /** * Whether the controls responds to user input or not. * * @type {boolean} * @default true */ this.enabled = true; /** * The internal state of the controls. * * @type {number} * @default -1 */ this.state = -1; /** * This object defines the keyboard input of the controls. * * @type {Object} */ this.keys = {}; /** * This object defines what type of actions are assigned to the available mouse buttons. * It depends on the control implementation what kind of mouse buttons and actions are supported. * * @type {{LEFT: ?number, MIDDLE: ?number, RIGHT: ?number}} */ this.mouseButtons = { LEFT: null, MIDDLE: null, RIGHT: null }; /** * This object defines what type of actions are assigned to what kind of touch interaction. * It depends on the control implementation what kind of touch interaction and actions are supported. 
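 *
 * A hypothetical configuration (not from the original docs), assuming the `TOUCH`
 * constants defined at the top of this module:
 * ```js
 * controls.touches = { ONE: TOUCH.ROTATE, TWO: TOUCH.DOLLY_PAN };
 * ```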
* * @type {{ONE: ?number, TWO: ?number}} */ this.touches = { ONE: null, TWO: null }; } /** * Connects the controls to the DOM. This method has so-called "side effects" since * it adds the module's event listeners to the DOM. * * @param {HTMLDOMElement} element - The DOM element to connect to. */ connect( element ) { if ( element === undefined ) { console.warn( 'THREE.Controls: connect() now requires an element.' ); // @deprecated, the warning can be removed with r185 return; } if ( this.domElement !== null ) this.disconnect(); this.domElement = element; } /** * Disconnects the controls from the DOM. */ disconnect() {} /** * Call this method if you no longer want to use the controls. It frees all internal * resources and removes all event listeners. */ dispose() {} /** * Controls should implement this method if they have to update their internal state * per simulation step. * * @param {number} [delta] - The time delta in seconds. */ update( /* delta */ ) {} } /** * Scales the texture as large as possible within its surface without cropping * or stretching the texture. The method preserves the original aspect ratio of * the texture. Akin to CSS `object-fit: contain`. * * @param {Texture} texture - The texture. * @param {number} aspect - The texture's aspect ratio. * @return {Texture} The updated texture. */ function contain( texture, aspect ) { const imageAspect = ( texture.image && texture.image.width ) ? texture.image.width / texture.image.height : 1; if ( imageAspect > aspect ) { texture.repeat.x = 1; texture.repeat.y = imageAspect / aspect; texture.offset.x = 0; texture.offset.y = ( 1 - texture.repeat.y ) / 2; } else { texture.repeat.x = aspect / imageAspect; texture.repeat.y = 1; texture.offset.x = ( 1 - texture.repeat.x ) / 2; texture.offset.y = 0; } return texture; } /** * Scales the texture to the smallest possible size to fill the surface, leaving * no empty space. The method preserves the original aspect ratio of the texture. * Akin to CSS `object-fit: cover`. * * @param {Texture} texture - The texture. * @param {number} aspect - The texture's aspect ratio. * @return {Texture} The updated texture. */ function cover( texture, aspect ) { const imageAspect = ( texture.image && texture.image.width ) ? texture.image.width / texture.image.height : 1; if ( imageAspect > aspect ) { texture.repeat.x = aspect / imageAspect; texture.repeat.y = 1; texture.offset.x = ( 1 - texture.repeat.x ) / 2; texture.offset.y = 0; } else { texture.repeat.x = 1; texture.repeat.y = imageAspect / aspect; texture.offset.x = 0; texture.offset.y = ( 1 - texture.repeat.y ) / 2; } return texture; } /** * Configures the texture to the default transformation. Akin to CSS `object-fit: fill`. * * @param {Texture} texture - The texture. * @return {Texture} The updated texture. */ function fill( texture ) { texture.repeat.x = 1; texture.repeat.y = 1; texture.offset.x = 0; texture.offset.y = 0; return texture; } /** * Determines how many bytes must be used to represent the texture. * * @param {number} width - The width of the texture. * @param {number} height - The height of the texture. * @param {number} format - The texture's format. * @param {number} type - The texture's type. * @return {number} The byte length.
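 *
 * A worked example (a sketch, not from the original docs): an uncompressed 1024 × 1024
 * texture with `RGBAFormat` and `UnsignedByteType` uses four one-byte components per
 * texel, i.e. `1024 * 1024 * 4 = 4194304` bytes.
 * ```js
 * const bytes = getByteLength( 1024, 1024, RGBAFormat, UnsignedByteType ); // 4194304
 * ```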
*/ function getByteLength( width, height, format, type ) { const typeByteLength = getTextureTypeByteLength( type ); switch ( format ) { // https://registry.khronos.org/OpenGL-Refpages/es3.0/html/glTexImage2D.xhtml case AlphaFormat: return width * height; case LuminanceFormat: return width * height; case LuminanceAlphaFormat: return width * height * 2; case RedFormat: return ( ( width * height ) / typeByteLength.components ) * typeByteLength.byteLength; case RedIntegerFormat: return ( ( width * height ) / typeByteLength.components ) * typeByteLength.byteLength; case RGFormat: return ( ( width * height * 2 ) / typeByteLength.components ) * typeByteLength.byteLength; case RGIntegerFormat: return ( ( width * height * 2 ) / typeByteLength.components ) * typeByteLength.byteLength; case RGBFormat: return ( ( width * height * 3 ) / typeByteLength.components ) * typeByteLength.byteLength; case RGBAFormat: return ( ( width * height * 4 ) / typeByteLength.components ) * typeByteLength.byteLength; case RGBAIntegerFormat: return ( ( width * height * 4 ) / typeByteLength.components ) * typeByteLength.byteLength; // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_s3tc_srgb/ case RGB_S3TC_DXT1_Format: case RGBA_S3TC_DXT1_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 8; case RGBA_S3TC_DXT3_Format: case RGBA_S3TC_DXT5_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 16; // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_pvrtc/ case RGB_PVRTC_2BPPV1_Format: case RGBA_PVRTC_2BPPV1_Format: return ( Math.max( width, 16 ) * Math.max( height, 8 ) ) / 4; case RGB_PVRTC_4BPPV1_Format: case RGBA_PVRTC_4BPPV1_Format: return ( Math.max( width, 8 ) * Math.max( height, 8 ) ) / 2; // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_etc/ case RGB_ETC1_Format: case RGB_ETC2_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 8; case RGBA_ETC2_EAC_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 16; // https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_astc/ case RGBA_ASTC_4x4_Format: return Math.floor( ( width + 3 ) / 4 ) * Math.floor( ( height + 3 ) / 4 ) * 16; case RGBA_ASTC_5x4_Format: return Math.floor( ( width + 4 ) / 5 ) * Math.floor( ( height + 3 ) / 4 ) * 16; case RGBA_ASTC_5x5_Format: return Math.floor( ( width + 4 ) / 5 ) * Math.floor( ( height + 4 ) / 5 ) * 16; case RGBA_ASTC_6x5_Format: return Math.floor( ( width + 5 ) / 6 ) * Math.floor( ( height + 4 ) / 5 ) * 16; case RGBA_ASTC_6x6_Format: return Math.floor( ( width + 5 ) / 6 ) * Math.floor( ( height + 5 ) / 6 ) * 16; case RGBA_ASTC_8x5_Format: return Math.floor( ( width + 7 ) / 8 ) * Math.floor( ( height + 4 ) / 5 ) * 16; case RGBA_ASTC_8x6_Format: return Math.floor( ( width + 7 ) / 8 ) * Math.floor( ( height + 5 ) / 6 ) * 16; case RGBA_ASTC_8x8_Format: return Math.floor( ( width + 7 ) / 8 ) * Math.floor( ( height + 7 ) / 8 ) * 16; case RGBA_ASTC_10x5_Format: return Math.floor( ( width + 9 ) / 10 ) * Math.floor( ( height + 4 ) / 5 ) * 16; case RGBA_ASTC_10x6_Format: return Math.floor( ( width + 9 ) / 10 ) * Math.floor( ( height + 5 ) / 6 ) * 16; case RGBA_ASTC_10x8_Format: return Math.floor( ( width + 9 ) / 10 ) * Math.floor( ( height + 7 ) / 8 ) * 16; case RGBA_ASTC_10x10_Format: return Math.floor( ( width + 9 ) / 10 ) * Math.floor( ( height + 9 ) / 10 ) * 16; case RGBA_ASTC_12x10_Format: return Math.floor( ( width + 11 ) / 12 ) * 
Math.floor( ( height + 9 ) / 10 ) * 16; case RGBA_ASTC_12x12_Format: return Math.floor( ( width + 11 ) / 12 ) * Math.floor( ( height + 11 ) / 12 ) * 16; // https://registry.khronos.org/webgl/extensions/EXT_texture_compression_bptc/ case RGBA_BPTC_Format: case RGB_BPTC_SIGNED_Format: case RGB_BPTC_UNSIGNED_Format: return Math.ceil( width / 4 ) * Math.ceil( height / 4 ) * 16; // https://registry.khronos.org/webgl/extensions/EXT_texture_compression_rgtc/ case RED_RGTC1_Format: case SIGNED_RED_RGTC1_Format: return Math.ceil( width / 4 ) * Math.ceil( height / 4 ) * 8; case RED_GREEN_RGTC2_Format: case SIGNED_RED_GREEN_RGTC2_Format: return Math.ceil( width / 4 ) * Math.ceil( height / 4 ) * 16; } throw new Error( `Unable to determine texture byte length for ${format} format.`, ); } function getTextureTypeByteLength( type ) { switch ( type ) { case UnsignedByteType: case ByteType: return { byteLength: 1, components: 1 }; case UnsignedShortType: case ShortType: case HalfFloatType: return { byteLength: 2, components: 1 }; case UnsignedShort4444Type: case UnsignedShort5551Type: return { byteLength: 2, components: 4 }; case UnsignedIntType: case IntType: case FloatType: return { byteLength: 4, components: 1 }; case UnsignedInt5999Type: return { byteLength: 4, components: 3 }; } throw new Error( `Unknown texture type ${type}.` ); } /** * A class containing utility functions for textures. * * @hideconstructor */ class TextureUtils { /** * Scales the texture as large as possible within its surface without cropping * or stretching the texture. The method preserves the original aspect ratio of * the texture. Akin to CSS `object-fit: contain` * * @param {Texture} texture - The texture. * @param {number} aspect - The texture's aspect ratio. * @return {Texture} The updated texture. */ static contain( texture, aspect ) { return contain( texture, aspect ); } /** * Scales the texture to the smallest possible size to fill the surface, leaving * no empty space. The method preserves the original aspect ratio of the texture. * Akin to CSS `object-fit: cover`. * * @param {Texture} texture - The texture. * @param {number} aspect - The texture's aspect ratio. * @return {Texture} The updated texture. */ static cover( texture, aspect ) { return cover( texture, aspect ); } /** * Configures the texture to the default transformation. Akin to CSS `object-fit: fill`. * * @param {Texture} texture - The texture. * @return {Texture} The updated texture. */ static fill( texture ) { return fill( texture ); } /** * Determines how many bytes must be used to represent the texture. * * @param {number} width - The width of the texture. * @param {number} height - The height of the texture. * @param {number} format - The texture's format. * @param {number} type - The texture's type. * @return {number} The byte length. */ static getByteLength( width, height, format, type ) { return getByteLength( width, height, format, type ); } } if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) { __THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'register', { detail: { revision: REVISION, } } ) ); } if ( typeof window !== 'undefined' ) { if ( window.__THREE__ ) { console.warn( 'WARNING: Multiple instances of Three.js being imported.' 
); } else { window.__THREE__ = REVISION; } } function WebGLAnimation() { let context = null; let isAnimating = false; let animationLoop = null; let requestId = null; function onAnimationFrame( time, frame ) { animationLoop( time, frame ); requestId = context.requestAnimationFrame( onAnimationFrame ); } return { start: function () { if ( isAnimating === true ) return; if ( animationLoop === null ) return; requestId = context.requestAnimationFrame( onAnimationFrame ); isAnimating = true; }, stop: function () { context.cancelAnimationFrame( requestId ); isAnimating = false; }, setAnimationLoop: function ( callback ) { animationLoop = callback; }, setContext: function ( value ) { context = value; } }; } function WebGLAttributes( gl ) { const buffers = new WeakMap(); function createBuffer( attribute, bufferType ) { const array = attribute.array; const usage = attribute.usage; const size = array.byteLength; const buffer = gl.createBuffer(); gl.bindBuffer( bufferType, buffer ); gl.bufferData( bufferType, array, usage ); attribute.onUploadCallback(); let type; if ( array instanceof Float32Array ) { type = gl.FLOAT; } else if ( array instanceof Uint16Array ) { if ( attribute.isFloat16BufferAttribute ) { type = gl.HALF_FLOAT; } else { type = gl.UNSIGNED_SHORT; } } else if ( array instanceof Int16Array ) { type = gl.SHORT; } else if ( array instanceof Uint32Array ) { type = gl.UNSIGNED_INT; } else if ( array instanceof Int32Array ) { type = gl.INT; } else if ( array instanceof Int8Array ) { type = gl.BYTE; } else if ( array instanceof Uint8Array ) { type = gl.UNSIGNED_BYTE; } else if ( array instanceof Uint8ClampedArray ) { type = gl.UNSIGNED_BYTE; } else { throw new Error( 'THREE.WebGLAttributes: Unsupported buffer data format: ' + array ); } return { buffer: buffer, type: type, bytesPerElement: array.BYTES_PER_ELEMENT, version: attribute.version, size: size }; } function updateBuffer( buffer, attribute, bufferType ) { const array = attribute.array; const updateRanges = attribute.updateRanges; gl.bindBuffer( bufferType, buffer ); if ( updateRanges.length === 0 ) { // Not using update ranges gl.bufferSubData( bufferType, 0, array ); } else { // Before applying update ranges, we merge any adjacent / overlapping // ranges to reduce load on `gl.bufferSubData`. Empirically, this has led // to performance improvements for applications which make heavy use of // update ranges. Likely due to GPU command overhead. // // Note that to reduce garbage collection between frames, we merge the // update ranges in-place. This is safe because this method will clear the // update ranges once updated. updateRanges.sort( ( a, b ) => a.start - b.start ); // To merge the update ranges in-place, we work from left to right in the // existing updateRanges array, merging ranges. This may result in a final // array which is smaller than the original. This index tracks the last // index representing a merged range, any data after this index can be // trimmed once the merge algorithm is completed. let mergeIndex = 0; for ( let i = 1; i < updateRanges.length; i ++ ) { const previousRange = updateRanges[ mergeIndex ]; const range = updateRanges[ i ]; // We add one here to merge adjacent ranges. This is safe because ranges // operate over positive integers. 
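			// Illustration (not part of the library): given updateRanges
			// [ { start: 0, count: 10 }, { start: 10, count: 5 } ], the second range starts at
			// 10 <= 0 + 10 + 1, so it is folded into the first, which becomes
			// { start: 0, count: 15 } and later results in a single bufferSubData call.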
if ( range.start <= previousRange.start + previousRange.count + 1 ) { previousRange.count = Math.max( previousRange.count, range.start + range.count - previousRange.start ); } else { ++ mergeIndex; updateRanges[ mergeIndex ] = range; } } // Trim the array to only contain the merged ranges. updateRanges.length = mergeIndex + 1; for ( let i = 0, l = updateRanges.length; i < l; i ++ ) { const range = updateRanges[ i ]; gl.bufferSubData( bufferType, range.start * array.BYTES_PER_ELEMENT, array, range.start, range.count ); } attribute.clearUpdateRanges(); } attribute.onUploadCallback(); } // function get( attribute ) { if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data; return buffers.get( attribute ); } function remove( attribute ) { if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data; const data = buffers.get( attribute ); if ( data ) { gl.deleteBuffer( data.buffer ); buffers.delete( attribute ); } } function update( attribute, bufferType ) { if ( attribute.isInterleavedBufferAttribute ) attribute = attribute.data; if ( attribute.isGLBufferAttribute ) { const cached = buffers.get( attribute ); if ( ! cached || cached.version < attribute.version ) { buffers.set( attribute, { buffer: attribute.buffer, type: attribute.type, bytesPerElement: attribute.elementSize, version: attribute.version } ); } return; } const data = buffers.get( attribute ); if ( data === undefined ) { buffers.set( attribute, createBuffer( attribute, bufferType ) ); } else if ( data.version < attribute.version ) { if ( data.size !== attribute.array.byteLength ) { throw new Error( 'THREE.WebGLAttributes: The size of the buffer attribute\'s array buffer does not match the original size. Resizing buffer attributes is not supported.' ); } updateBuffer( data.buffer, attribute, bufferType ); data.version = attribute.version; } } return { get: get, remove: remove, update: update }; } var alphahash_fragment = "#ifdef USE_ALPHAHASH\n\tif ( diffuseColor.a < getAlphaHashThreshold( vPosition ) ) discard;\n#endif"; var alphahash_pars_fragment = "#ifdef USE_ALPHAHASH\n\tconst float ALPHA_HASH_SCALE = 0.05;\n\tfloat hash2D( vec2 value ) {\n\t\treturn fract( 1.0e4 * sin( 17.0 * value.x + 0.1 * value.y ) * ( 0.1 + abs( sin( 13.0 * value.y + value.x ) ) ) );\n\t}\n\tfloat hash3D( vec3 value ) {\n\t\treturn hash2D( vec2( hash2D( value.xy ), value.z ) );\n\t}\n\tfloat getAlphaHashThreshold( vec3 position ) {\n\t\tfloat maxDeriv = max(\n\t\t\tlength( dFdx( position.xyz ) ),\n\t\t\tlength( dFdy( position.xyz ) )\n\t\t);\n\t\tfloat pixScale = 1.0 / ( ALPHA_HASH_SCALE * maxDeriv );\n\t\tvec2 pixScales = vec2(\n\t\t\texp2( floor( log2( pixScale ) ) ),\n\t\t\texp2( ceil( log2( pixScale ) ) )\n\t\t);\n\t\tvec2 alpha = vec2(\n\t\t\thash3D( floor( pixScales.x * position.xyz ) ),\n\t\t\thash3D( floor( pixScales.y * position.xyz ) )\n\t\t);\n\t\tfloat lerpFactor = fract( log2( pixScale ) );\n\t\tfloat x = ( 1.0 - lerpFactor ) * alpha.x + lerpFactor * alpha.y;\n\t\tfloat a = min( lerpFactor, 1.0 - lerpFactor );\n\t\tvec3 cases = vec3(\n\t\t\tx * x / ( 2.0 * a * ( 1.0 - a ) ),\n\t\t\t( x - 0.5 * a ) / ( 1.0 - a ),\n\t\t\t1.0 - ( ( 1.0 - x ) * ( 1.0 - x ) / ( 2.0 * a * ( 1.0 - a ) ) )\n\t\t);\n\t\tfloat threshold = ( x < ( 1.0 - a ) )\n\t\t\t? ( ( x < a ) ? 
cases.x : cases.y )\n\t\t\t: cases.z;\n\t\treturn clamp( threshold , 1.0e-6, 1.0 );\n\t}\n#endif"; var alphamap_fragment = "#ifdef USE_ALPHAMAP\n\tdiffuseColor.a *= texture2D( alphaMap, vAlphaMapUv ).g;\n#endif"; var alphamap_pars_fragment = "#ifdef USE_ALPHAMAP\n\tuniform sampler2D alphaMap;\n#endif"; var alphatest_fragment = "#ifdef USE_ALPHATEST\n\t#ifdef ALPHA_TO_COVERAGE\n\tdiffuseColor.a = smoothstep( alphaTest, alphaTest + fwidth( diffuseColor.a ), diffuseColor.a );\n\tif ( diffuseColor.a == 0.0 ) discard;\n\t#else\n\tif ( diffuseColor.a < alphaTest ) discard;\n\t#endif\n#endif"; var alphatest_pars_fragment = "#ifdef USE_ALPHATEST\n\tuniform float alphaTest;\n#endif"; var aomap_fragment = "#ifdef USE_AOMAP\n\tfloat ambientOcclusion = ( texture2D( aoMap, vAoMapUv ).r - 1.0 ) * aoMapIntensity + 1.0;\n\treflectedLight.indirectDiffuse *= ambientOcclusion;\n\t#if defined( USE_CLEARCOAT ) \n\t\tclearcoatSpecularIndirect *= ambientOcclusion;\n\t#endif\n\t#if defined( USE_SHEEN ) \n\t\tsheenSpecularIndirect *= ambientOcclusion;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( STANDARD )\n\t\tfloat dotNV = saturate( dot( geometryNormal, geometryViewDir ) );\n\t\treflectedLight.indirectSpecular *= computeSpecularOcclusion( dotNV, ambientOcclusion, material.roughness );\n\t#endif\n#endif"; var aomap_pars_fragment = "#ifdef USE_AOMAP\n\tuniform sampler2D aoMap;\n\tuniform float aoMapIntensity;\n#endif"; var batching_pars_vertex = "#ifdef USE_BATCHING\n\t#if ! defined( GL_ANGLE_multi_draw )\n\t#define gl_DrawID _gl_DrawID\n\tuniform int _gl_DrawID;\n\t#endif\n\tuniform highp sampler2D batchingTexture;\n\tuniform highp usampler2D batchingIdTexture;\n\tmat4 getBatchingMatrix( const in float i ) {\n\t\tint size = textureSize( batchingTexture, 0 ).x;\n\t\tint j = int( i ) * 4;\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\tvec4 v1 = texelFetch( batchingTexture, ivec2( x, y ), 0 );\n\t\tvec4 v2 = texelFetch( batchingTexture, ivec2( x + 1, y ), 0 );\n\t\tvec4 v3 = texelFetch( batchingTexture, ivec2( x + 2, y ), 0 );\n\t\tvec4 v4 = texelFetch( batchingTexture, ivec2( x + 3, y ), 0 );\n\t\treturn mat4( v1, v2, v3, v4 );\n\t}\n\tfloat getIndirectIndex( const in int i ) {\n\t\tint size = textureSize( batchingIdTexture, 0 ).x;\n\t\tint x = i % size;\n\t\tint y = i / size;\n\t\treturn float( texelFetch( batchingIdTexture, ivec2( x, y ), 0 ).r );\n\t}\n#endif\n#ifdef USE_BATCHING_COLOR\n\tuniform sampler2D batchingColorTexture;\n\tvec3 getBatchingColor( const in float i ) {\n\t\tint size = textureSize( batchingColorTexture, 0 ).x;\n\t\tint j = int( i );\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\treturn texelFetch( batchingColorTexture, ivec2( x, y ), 0 ).rgb;\n\t}\n#endif"; var batching_vertex = "#ifdef USE_BATCHING\n\tmat4 batchingMatrix = getBatchingMatrix( getIndirectIndex( gl_DrawID ) );\n#endif"; var begin_vertex = "vec3 transformed = vec3( position );\n#ifdef USE_ALPHAHASH\n\tvPosition = vec3( position );\n#endif"; var beginnormal_vertex = "vec3 objectNormal = vec3( normal );\n#ifdef USE_TANGENT\n\tvec3 objectTangent = vec3( tangent.xyz );\n#endif"; var bsdfs = "float G_BlinnPhong_Implicit( ) {\n\treturn 0.25;\n}\nfloat D_BlinnPhong( const in float shininess, const in float dotNH ) {\n\treturn RECIPROCAL_PI * ( shininess * 0.5 + 1.0 ) * pow( dotNH, shininess );\n}\nvec3 BRDF_BlinnPhong( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in vec3 specularColor, const in float shininess ) {\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNH = saturate( 
dot( normal, halfDir ) );\n\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\tvec3 F = F_Schlick( specularColor, 1.0, dotVH );\n\tfloat G = G_BlinnPhong_Implicit( );\n\tfloat D = D_BlinnPhong( shininess, dotNH );\n\treturn F * ( G * D );\n} // validated"; var iridescence_fragment = "#ifdef USE_IRIDESCENCE\n\tconst mat3 XYZ_TO_REC709 = mat3(\n\t\t 3.2404542, -0.9692660, 0.0556434,\n\t\t-1.5371385, 1.8760108, -0.2040259,\n\t\t-0.4985314, 0.0415560, 1.0572252\n\t);\n\tvec3 Fresnel0ToIor( vec3 fresnel0 ) {\n\t\tvec3 sqrtF0 = sqrt( fresnel0 );\n\t\treturn ( vec3( 1.0 ) + sqrtF0 ) / ( vec3( 1.0 ) - sqrtF0 );\n\t}\n\tvec3 IorToFresnel0( vec3 transmittedIor, float incidentIor ) {\n\t\treturn pow2( ( transmittedIor - vec3( incidentIor ) ) / ( transmittedIor + vec3( incidentIor ) ) );\n\t}\n\tfloat IorToFresnel0( float transmittedIor, float incidentIor ) {\n\t\treturn pow2( ( transmittedIor - incidentIor ) / ( transmittedIor + incidentIor ));\n\t}\n\tvec3 evalSensitivity( float OPD, vec3 shift ) {\n\t\tfloat phase = 2.0 * PI * OPD * 1.0e-9;\n\t\tvec3 val = vec3( 5.4856e-13, 4.4201e-13, 5.2481e-13 );\n\t\tvec3 pos = vec3( 1.6810e+06, 1.7953e+06, 2.2084e+06 );\n\t\tvec3 var = vec3( 4.3278e+09, 9.3046e+09, 6.6121e+09 );\n\t\tvec3 xyz = val * sqrt( 2.0 * PI * var ) * cos( pos * phase + shift ) * exp( - pow2( phase ) * var );\n\t\txyz.x += 9.7470e-14 * sqrt( 2.0 * PI * 4.5282e+09 ) * cos( 2.2399e+06 * phase + shift[ 0 ] ) * exp( - 4.5282e+09 * pow2( phase ) );\n\t\txyz /= 1.0685e-7;\n\t\tvec3 rgb = XYZ_TO_REC709 * xyz;\n\t\treturn rgb;\n\t}\n\tvec3 evalIridescence( float outsideIOR, float eta2, float cosTheta1, float thinFilmThickness, vec3 baseF0 ) {\n\t\tvec3 I;\n\t\tfloat iridescenceIOR = mix( outsideIOR, eta2, smoothstep( 0.0, 0.03, thinFilmThickness ) );\n\t\tfloat sinTheta2Sq = pow2( outsideIOR / iridescenceIOR ) * ( 1.0 - pow2( cosTheta1 ) );\n\t\tfloat cosTheta2Sq = 1.0 - sinTheta2Sq;\n\t\tif ( cosTheta2Sq < 0.0 ) {\n\t\t\treturn vec3( 1.0 );\n\t\t}\n\t\tfloat cosTheta2 = sqrt( cosTheta2Sq );\n\t\tfloat R0 = IorToFresnel0( iridescenceIOR, outsideIOR );\n\t\tfloat R12 = F_Schlick( R0, 1.0, cosTheta1 );\n\t\tfloat T121 = 1.0 - R12;\n\t\tfloat phi12 = 0.0;\n\t\tif ( iridescenceIOR < outsideIOR ) phi12 = PI;\n\t\tfloat phi21 = PI - phi12;\n\t\tvec3 baseIOR = Fresnel0ToIor( clamp( baseF0, 0.0, 0.9999 ) );\t\tvec3 R1 = IorToFresnel0( baseIOR, iridescenceIOR );\n\t\tvec3 R23 = F_Schlick( R1, 1.0, cosTheta2 );\n\t\tvec3 phi23 = vec3( 0.0 );\n\t\tif ( baseIOR[ 0 ] < iridescenceIOR ) phi23[ 0 ] = PI;\n\t\tif ( baseIOR[ 1 ] < iridescenceIOR ) phi23[ 1 ] = PI;\n\t\tif ( baseIOR[ 2 ] < iridescenceIOR ) phi23[ 2 ] = PI;\n\t\tfloat OPD = 2.0 * iridescenceIOR * thinFilmThickness * cosTheta2;\n\t\tvec3 phi = vec3( phi21 ) + phi23;\n\t\tvec3 R123 = clamp( R12 * R23, 1e-5, 0.9999 );\n\t\tvec3 r123 = sqrt( R123 );\n\t\tvec3 Rs = pow2( T121 ) * R23 / ( vec3( 1.0 ) - R123 );\n\t\tvec3 C0 = R12 + Rs;\n\t\tI = C0;\n\t\tvec3 Cm = Rs - T121;\n\t\tfor ( int m = 1; m <= 2; ++ m ) {\n\t\t\tCm *= r123;\n\t\t\tvec3 Sm = 2.0 * evalSensitivity( float( m ) * OPD, float( m ) * phi );\n\t\t\tI += Cm * Sm;\n\t\t}\n\t\treturn max( I, vec3( 0.0 ) );\n\t}\n#endif"; var bumpmap_pars_fragment = "#ifdef USE_BUMPMAP\n\tuniform sampler2D bumpMap;\n\tuniform float bumpScale;\n\tvec2 dHdxy_fwd() {\n\t\tvec2 dSTdx = dFdx( vBumpMapUv );\n\t\tvec2 dSTdy = dFdy( vBumpMapUv );\n\t\tfloat Hll = bumpScale * texture2D( bumpMap, vBumpMapUv ).x;\n\t\tfloat dBx = bumpScale * texture2D( bumpMap, vBumpMapUv + dSTdx ).x - Hll;\n\t\tfloat dBy = 
bumpScale * texture2D( bumpMap, vBumpMapUv + dSTdy ).x - Hll;\n\t\treturn vec2( dBx, dBy );\n\t}\n\tvec3 perturbNormalArb( vec3 surf_pos, vec3 surf_norm, vec2 dHdxy, float faceDirection ) {\n\t\tvec3 vSigmaX = normalize( dFdx( surf_pos.xyz ) );\n\t\tvec3 vSigmaY = normalize( dFdy( surf_pos.xyz ) );\n\t\tvec3 vN = surf_norm;\n\t\tvec3 R1 = cross( vSigmaY, vN );\n\t\tvec3 R2 = cross( vN, vSigmaX );\n\t\tfloat fDet = dot( vSigmaX, R1 ) * faceDirection;\n\t\tvec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );\n\t\treturn normalize( abs( fDet ) * surf_norm - vGrad );\n\t}\n#endif"; var clipping_planes_fragment = "#if NUM_CLIPPING_PLANES > 0\n\tvec4 plane;\n\t#ifdef ALPHA_TO_COVERAGE\n\t\tfloat distanceToPlane, distanceGradient;\n\t\tfloat clipOpacity = 1.0;\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {\n\t\t\tplane = clippingPlanes[ i ];\n\t\t\tdistanceToPlane = - dot( vClipPosition, plane.xyz ) + plane.w;\n\t\t\tdistanceGradient = fwidth( distanceToPlane ) / 2.0;\n\t\t\tclipOpacity *= smoothstep( - distanceGradient, distanceGradient, distanceToPlane );\n\t\t\tif ( clipOpacity == 0.0 ) discard;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t\t#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n\t\t\tfloat unionClipOpacity = 1.0;\n\t\t\t#pragma unroll_loop_start\n\t\t\tfor ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n\t\t\t\tplane = clippingPlanes[ i ];\n\t\t\t\tdistanceToPlane = - dot( vClipPosition, plane.xyz ) + plane.w;\n\t\t\t\tdistanceGradient = fwidth( distanceToPlane ) / 2.0;\n\t\t\t\tunionClipOpacity *= 1.0 - smoothstep( - distanceGradient, distanceGradient, distanceToPlane );\n\t\t\t}\n\t\t\t#pragma unroll_loop_end\n\t\t\tclipOpacity *= 1.0 - unionClipOpacity;\n\t\t#endif\n\t\tdiffuseColor.a *= clipOpacity;\n\t\tif ( diffuseColor.a == 0.0 ) discard;\n\t#else\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < UNION_CLIPPING_PLANES; i ++ ) {\n\t\t\tplane = clippingPlanes[ i ];\n\t\t\tif ( dot( vClipPosition, plane.xyz ) > plane.w ) discard;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t\t#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n\t\t\tbool clipped = true;\n\t\t\t#pragma unroll_loop_start\n\t\t\tfor ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n\t\t\t\tplane = clippingPlanes[ i ];\n\t\t\t\tclipped = ( dot( vClipPosition, plane.xyz ) > plane.w ) && clipped;\n\t\t\t}\n\t\t\t#pragma unroll_loop_end\n\t\t\tif ( clipped ) discard;\n\t\t#endif\n\t#endif\n#endif"; var clipping_planes_pars_fragment = "#if NUM_CLIPPING_PLANES > 0\n\tvarying vec3 vClipPosition;\n\tuniform vec4 clippingPlanes[ NUM_CLIPPING_PLANES ];\n#endif"; var clipping_planes_pars_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvarying vec3 vClipPosition;\n#endif"; var clipping_planes_vertex = "#if NUM_CLIPPING_PLANES > 0\n\tvClipPosition = - mvPosition.xyz;\n#endif"; var color_fragment = "#if defined( USE_COLOR_ALPHA )\n\tdiffuseColor *= vColor;\n#elif defined( USE_COLOR )\n\tdiffuseColor.rgb *= vColor;\n#endif"; var color_pars_fragment = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR )\n\tvarying vec3 vColor;\n#endif"; var color_pars_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvarying vec4 vColor;\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvarying vec3 vColor;\n#endif"; var color_vertex = "#if defined( USE_COLOR_ALPHA )\n\tvColor = vec4( 1.0 );\n#elif defined( USE_COLOR ) || defined( USE_INSTANCING_COLOR ) || defined( USE_BATCHING_COLOR )\n\tvColor = 
vec3( 1.0 );\n#endif\n#ifdef USE_COLOR\n\tvColor *= color;\n#endif\n#ifdef USE_INSTANCING_COLOR\n\tvColor.xyz *= instanceColor.xyz;\n#endif\n#ifdef USE_BATCHING_COLOR\n\tvec3 batchingColor = getBatchingColor( getIndirectIndex( gl_DrawID ) );\n\tvColor.xyz *= batchingColor.xyz;\n#endif"; var common = "#define PI 3.141592653589793\n#define PI2 6.283185307179586\n#define PI_HALF 1.5707963267948966\n#define RECIPROCAL_PI 0.3183098861837907\n#define RECIPROCAL_PI2 0.15915494309189535\n#define EPSILON 1e-6\n#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\n#define whiteComplement( a ) ( 1.0 - saturate( a ) )\nfloat pow2( const in float x ) { return x*x; }\nvec3 pow2( const in vec3 x ) { return x*x; }\nfloat pow3( const in float x ) { return x*x*x; }\nfloat pow4( const in float x ) { float x2 = x*x; return x2*x2; }\nfloat max3( const in vec3 v ) { return max( max( v.x, v.y ), v.z ); }\nfloat average( const in vec3 v ) { return dot( v, vec3( 0.3333333 ) ); }\nhighp float rand( const in vec2 uv ) {\n\tconst highp float a = 12.9898, b = 78.233, c = 43758.5453;\n\thighp float dt = dot( uv.xy, vec2( a,b ) ), sn = mod( dt, PI );\n\treturn fract( sin( sn ) * c );\n}\n#ifdef HIGH_PRECISION\n\tfloat precisionSafeLength( vec3 v ) { return length( v ); }\n#else\n\tfloat precisionSafeLength( vec3 v ) {\n\t\tfloat maxComponent = max3( abs( v ) );\n\t\treturn length( v / maxComponent ) * maxComponent;\n\t}\n#endif\nstruct IncidentLight {\n\tvec3 color;\n\tvec3 direction;\n\tbool visible;\n};\nstruct ReflectedLight {\n\tvec3 directDiffuse;\n\tvec3 directSpecular;\n\tvec3 indirectDiffuse;\n\tvec3 indirectSpecular;\n};\n#ifdef USE_ALPHAHASH\n\tvarying vec3 vPosition;\n#endif\nvec3 transformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( matrix * vec4( dir, 0.0 ) ).xyz );\n}\nvec3 inverseTransformDirection( in vec3 dir, in mat4 matrix ) {\n\treturn normalize( ( vec4( dir, 0.0 ) * matrix ).xyz );\n}\nmat3 transposeMat3( const in mat3 m ) {\n\tmat3 tmp;\n\ttmp[ 0 ] = vec3( m[ 0 ].x, m[ 1 ].x, m[ 2 ].x );\n\ttmp[ 1 ] = vec3( m[ 0 ].y, m[ 1 ].y, m[ 2 ].y );\n\ttmp[ 2 ] = vec3( m[ 0 ].z, m[ 1 ].z, m[ 2 ].z );\n\treturn tmp;\n}\nbool isPerspectiveMatrix( mat4 m ) {\n\treturn m[ 2 ][ 3 ] == - 1.0;\n}\nvec2 equirectUv( in vec3 dir ) {\n\tfloat u = atan( dir.z, dir.x ) * RECIPROCAL_PI2 + 0.5;\n\tfloat v = asin( clamp( dir.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;\n\treturn vec2( u, v );\n}\nvec3 BRDF_Lambert( const in vec3 diffuseColor ) {\n\treturn RECIPROCAL_PI * diffuseColor;\n}\nvec3 F_Schlick( const in vec3 f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n}\nfloat F_Schlick( const in float f0, const in float f90, const in float dotVH ) {\n\tfloat fresnel = exp2( ( - 5.55473 * dotVH - 6.98316 ) * dotVH );\n\treturn f0 * ( 1.0 - fresnel ) + ( f90 * fresnel );\n} // validated"; var cube_uv_reflection_fragment = "#ifdef ENVMAP_TYPE_CUBE_UV\n\t#define cubeUV_minMipLevel 4.0\n\t#define cubeUV_minTileSize 16.0\n\tfloat getFace( vec3 direction ) {\n\t\tvec3 absDirection = abs( direction );\n\t\tfloat face = - 1.0;\n\t\tif ( absDirection.x > absDirection.z ) {\n\t\t\tif ( absDirection.x > absDirection.y )\n\t\t\t\tface = direction.x > 0.0 ? 0.0 : 3.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 1.0 : 4.0;\n\t\t} else {\n\t\t\tif ( absDirection.z > absDirection.y )\n\t\t\t\tface = direction.z > 0.0 ? 2.0 : 5.0;\n\t\t\telse\n\t\t\t\tface = direction.y > 0.0 ? 
1.0 : 4.0;\n\t\t}\n\t\treturn face;\n\t}\n\tvec2 getUV( vec3 direction, float face ) {\n\t\tvec2 uv;\n\t\tif ( face == 0.0 ) {\n\t\t\tuv = vec2( direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 1.0 ) {\n\t\t\tuv = vec2( - direction.x, - direction.z ) / abs( direction.y );\n\t\t} else if ( face == 2.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.y ) / abs( direction.z );\n\t\t} else if ( face == 3.0 ) {\n\t\t\tuv = vec2( - direction.z, direction.y ) / abs( direction.x );\n\t\t} else if ( face == 4.0 ) {\n\t\t\tuv = vec2( - direction.x, direction.z ) / abs( direction.y );\n\t\t} else {\n\t\t\tuv = vec2( direction.x, direction.y ) / abs( direction.z );\n\t\t}\n\t\treturn 0.5 * ( uv + 1.0 );\n\t}\n\tvec3 bilinearCubeUV( sampler2D envMap, vec3 direction, float mipInt ) {\n\t\tfloat face = getFace( direction );\n\t\tfloat filterInt = max( cubeUV_minMipLevel - mipInt, 0.0 );\n\t\tmipInt = max( mipInt, cubeUV_minMipLevel );\n\t\tfloat faceSize = exp2( mipInt );\n\t\thighp vec2 uv = getUV( direction, face ) * ( faceSize - 2.0 ) + 1.0;\n\t\tif ( face > 2.0 ) {\n\t\t\tuv.y += faceSize;\n\t\t\tface -= 3.0;\n\t\t}\n\t\tuv.x += face * faceSize;\n\t\tuv.x += filterInt * 3.0 * cubeUV_minTileSize;\n\t\tuv.y += 4.0 * ( exp2( CUBEUV_MAX_MIP ) - faceSize );\n\t\tuv.x *= CUBEUV_TEXEL_WIDTH;\n\t\tuv.y *= CUBEUV_TEXEL_HEIGHT;\n\t\t#ifdef texture2DGradEXT\n\t\t\treturn texture2DGradEXT( envMap, uv, vec2( 0.0 ), vec2( 0.0 ) ).rgb;\n\t\t#else\n\t\t\treturn texture2D( envMap, uv ).rgb;\n\t\t#endif\n\t}\n\t#define cubeUV_r0 1.0\n\t#define cubeUV_m0 - 2.0\n\t#define cubeUV_r1 0.8\n\t#define cubeUV_m1 - 1.0\n\t#define cubeUV_r4 0.4\n\t#define cubeUV_m4 2.0\n\t#define cubeUV_r5 0.305\n\t#define cubeUV_m5 3.0\n\t#define cubeUV_r6 0.21\n\t#define cubeUV_m6 4.0\n\tfloat roughnessToMip( float roughness ) {\n\t\tfloat mip = 0.0;\n\t\tif ( roughness >= cubeUV_r1 ) {\n\t\t\tmip = ( cubeUV_r0 - roughness ) * ( cubeUV_m1 - cubeUV_m0 ) / ( cubeUV_r0 - cubeUV_r1 ) + cubeUV_m0;\n\t\t} else if ( roughness >= cubeUV_r4 ) {\n\t\t\tmip = ( cubeUV_r1 - roughness ) * ( cubeUV_m4 - cubeUV_m1 ) / ( cubeUV_r1 - cubeUV_r4 ) + cubeUV_m1;\n\t\t} else if ( roughness >= cubeUV_r5 ) {\n\t\t\tmip = ( cubeUV_r4 - roughness ) * ( cubeUV_m5 - cubeUV_m4 ) / ( cubeUV_r4 - cubeUV_r5 ) + cubeUV_m4;\n\t\t} else if ( roughness >= cubeUV_r6 ) {\n\t\t\tmip = ( cubeUV_r5 - roughness ) * ( cubeUV_m6 - cubeUV_m5 ) / ( cubeUV_r5 - cubeUV_r6 ) + cubeUV_m5;\n\t\t} else {\n\t\t\tmip = - 2.0 * log2( 1.16 * roughness );\t\t}\n\t\treturn mip;\n\t}\n\tvec4 textureCubeUV( sampler2D envMap, vec3 sampleDir, float roughness ) {\n\t\tfloat mip = clamp( roughnessToMip( roughness ), cubeUV_m0, CUBEUV_MAX_MIP );\n\t\tfloat mipF = fract( mip );\n\t\tfloat mipInt = floor( mip );\n\t\tvec3 color0 = bilinearCubeUV( envMap, sampleDir, mipInt );\n\t\tif ( mipF == 0.0 ) {\n\t\t\treturn vec4( color0, 1.0 );\n\t\t} else {\n\t\t\tvec3 color1 = bilinearCubeUV( envMap, sampleDir, mipInt + 1.0 );\n\t\t\treturn vec4( mix( color0, color1, mipF ), 1.0 );\n\t\t}\n\t}\n#endif"; var defaultnormal_vertex = "vec3 transformedNormal = objectNormal;\n#ifdef USE_TANGENT\n\tvec3 transformedTangent = objectTangent;\n#endif\n#ifdef USE_BATCHING\n\tmat3 bm = mat3( batchingMatrix );\n\ttransformedNormal /= vec3( dot( bm[ 0 ], bm[ 0 ] ), dot( bm[ 1 ], bm[ 1 ] ), dot( bm[ 2 ], bm[ 2 ] ) );\n\ttransformedNormal = bm * transformedNormal;\n\t#ifdef USE_TANGENT\n\t\ttransformedTangent = bm * transformedTangent;\n\t#endif\n#endif\n#ifdef USE_INSTANCING\n\tmat3 im = mat3( instanceMatrix 
);\n\ttransformedNormal /= vec3( dot( im[ 0 ], im[ 0 ] ), dot( im[ 1 ], im[ 1 ] ), dot( im[ 2 ], im[ 2 ] ) );\n\ttransformedNormal = im * transformedNormal;\n\t#ifdef USE_TANGENT\n\t\ttransformedTangent = im * transformedTangent;\n\t#endif\n#endif\ntransformedNormal = normalMatrix * transformedNormal;\n#ifdef FLIP_SIDED\n\ttransformedNormal = - transformedNormal;\n#endif\n#ifdef USE_TANGENT\n\ttransformedTangent = ( modelViewMatrix * vec4( transformedTangent, 0.0 ) ).xyz;\n\t#ifdef FLIP_SIDED\n\t\ttransformedTangent = - transformedTangent;\n\t#endif\n#endif"; var displacementmap_pars_vertex = "#ifdef USE_DISPLACEMENTMAP\n\tuniform sampler2D displacementMap;\n\tuniform float displacementScale;\n\tuniform float displacementBias;\n#endif"; var displacementmap_vertex = "#ifdef USE_DISPLACEMENTMAP\n\ttransformed += normalize( objectNormal ) * ( texture2D( displacementMap, vDisplacementMapUv ).x * displacementScale + displacementBias );\n#endif"; var emissivemap_fragment = "#ifdef USE_EMISSIVEMAP\n\tvec4 emissiveColor = texture2D( emissiveMap, vEmissiveMapUv );\n\t#ifdef DECODE_VIDEO_TEXTURE_EMISSIVE\n\t\temissiveColor = sRGBTransferEOTF( emissiveColor );\n\t#endif\n\ttotalEmissiveRadiance *= emissiveColor.rgb;\n#endif"; var emissivemap_pars_fragment = "#ifdef USE_EMISSIVEMAP\n\tuniform sampler2D emissiveMap;\n#endif"; var colorspace_fragment = "gl_FragColor = linearToOutputTexel( gl_FragColor );"; var colorspace_pars_fragment = "vec4 LinearTransferOETF( in vec4 value ) {\n\treturn value;\n}\nvec4 sRGBTransferEOTF( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );\n}\nvec4 sRGBTransferOETF( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );\n}"; var envmap_fragment = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvec3 cameraToFrag;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToFrag = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToFrag = normalize( vWorldPosition - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( cameraToFrag, worldNormal );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( cameraToFrag, worldNormal, refractionRatio );\n\t\t#endif\n\t#else\n\t\tvec3 reflectVec = vReflect;\n\t#endif\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 envColor = textureCube( envMap, envMapRotation * vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) );\n\t#else\n\t\tvec4 envColor = vec4( 0.0 );\n\t#endif\n\t#ifdef ENVMAP_BLENDING_MULTIPLY\n\t\toutgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_MIX )\n\t\toutgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_ADD )\n\t\toutgoingLight += envColor.xyz * specularStrength * reflectivity;\n\t#endif\n#endif"; var envmap_common_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float envMapIntensity;\n\tuniform float flipEnvMap;\n\tuniform mat3 envMapRotation;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n\t\n#endif"; var envmap_pars_fragment = "#ifdef USE_ENVMAP\n\tuniform float reflectivity;\n\t#if 
defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( LAMBERT )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\tvarying vec3 vWorldPosition;\n\t\tuniform float refractionRatio;\n\t#else\n\t\tvarying vec3 vReflect;\n\t#endif\n#endif"; var envmap_pars_vertex = "#ifdef USE_ENVMAP\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG ) || defined( LAMBERT )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\t\n\t\tvarying vec3 vWorldPosition;\n\t#else\n\t\tvarying vec3 vReflect;\n\t\tuniform float refractionRatio;\n\t#endif\n#endif"; var envmap_vertex = "#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvWorldPosition = worldPosition.xyz;\n\t#else\n\t\tvec3 cameraToVertex;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToVertex = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToVertex = normalize( worldPosition.xyz - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvReflect = reflect( cameraToVertex, worldNormal );\n\t\t#else\n\t\t\tvReflect = refract( cameraToVertex, worldNormal, refractionRatio );\n\t\t#endif\n\t#endif\n#endif"; var fog_vertex = "#ifdef USE_FOG\n\tvFogDepth = - mvPosition.z;\n#endif"; var fog_pars_vertex = "#ifdef USE_FOG\n\tvarying float vFogDepth;\n#endif"; var fog_fragment = "#ifdef USE_FOG\n\t#ifdef FOG_EXP2\n\t\tfloat fogFactor = 1.0 - exp( - fogDensity * fogDensity * vFogDepth * vFogDepth );\n\t#else\n\t\tfloat fogFactor = smoothstep( fogNear, fogFar, vFogDepth );\n\t#endif\n\tgl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );\n#endif"; var fog_pars_fragment = "#ifdef USE_FOG\n\tuniform vec3 fogColor;\n\tvarying float vFogDepth;\n\t#ifdef FOG_EXP2\n\t\tuniform float fogDensity;\n\t#else\n\t\tuniform float fogNear;\n\t\tuniform float fogFar;\n\t#endif\n#endif"; var gradientmap_pars_fragment = "#ifdef USE_GRADIENTMAP\n\tuniform sampler2D gradientMap;\n#endif\nvec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {\n\tfloat dotNL = dot( normal, lightDirection );\n\tvec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );\n\t#ifdef USE_GRADIENTMAP\n\t\treturn vec3( texture2D( gradientMap, coord ).r );\n\t#else\n\t\tvec2 fw = fwidth( coord ) * 0.5;\n\t\treturn mix( vec3( 0.7 ), vec3( 1.0 ), smoothstep( 0.7 - fw.x, 0.7 + fw.x, coord.x ) );\n\t#endif\n}"; var lightmap_pars_fragment = "#ifdef USE_LIGHTMAP\n\tuniform sampler2D lightMap;\n\tuniform float lightMapIntensity;\n#endif"; var lights_lambert_fragment = "LambertMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;\nmaterial.specularStrength = specularStrength;"; var lights_lambert_pars_fragment = "varying vec3 vViewPosition;\nstruct LambertMaterial {\n\tvec3 diffuseColor;\n\tfloat specularStrength;\n};\nvoid RE_Direct_Lambert( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in LambertMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Lambert( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 
geometryClearcoatNormal, const in LambertMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_Lambert\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Lambert"; var lights_pars_begin = "uniform bool receiveShadow;\nuniform vec3 ambientLightColor;\n#if defined( USE_LIGHT_PROBES )\n\tuniform vec3 lightProbe[ 9 ];\n#endif\nvec3 shGetIrradianceAt( in vec3 normal, in vec3 shCoefficients[ 9 ] ) {\n\tfloat x = normal.x, y = normal.y, z = normal.z;\n\tvec3 result = shCoefficients[ 0 ] * 0.886227;\n\tresult += shCoefficients[ 1 ] * 2.0 * 0.511664 * y;\n\tresult += shCoefficients[ 2 ] * 2.0 * 0.511664 * z;\n\tresult += shCoefficients[ 3 ] * 2.0 * 0.511664 * x;\n\tresult += shCoefficients[ 4 ] * 2.0 * 0.429043 * x * y;\n\tresult += shCoefficients[ 5 ] * 2.0 * 0.429043 * y * z;\n\tresult += shCoefficients[ 6 ] * ( 0.743125 * z * z - 0.247708 );\n\tresult += shCoefficients[ 7 ] * 2.0 * 0.429043 * x * z;\n\tresult += shCoefficients[ 8 ] * 0.429043 * ( x * x - y * y );\n\treturn result;\n}\nvec3 getLightProbeIrradiance( const in vec3 lightProbe[ 9 ], const in vec3 normal ) {\n\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\tvec3 irradiance = shGetIrradianceAt( worldNormal, lightProbe );\n\treturn irradiance;\n}\nvec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {\n\tvec3 irradiance = ambientLightColor;\n\treturn irradiance;\n}\nfloat getDistanceAttenuation( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {\n\tfloat distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );\n\tif ( cutoffDistance > 0.0 ) {\n\t\tdistanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );\n\t}\n\treturn distanceFalloff;\n}\nfloat getSpotAttenuation( const in float coneCosine, const in float penumbraCosine, const in float angleCosine ) {\n\treturn smoothstep( coneCosine, penumbraCosine, angleCosine );\n}\n#if NUM_DIR_LIGHTS > 0\n\tstruct DirectionalLight {\n\t\tvec3 direction;\n\t\tvec3 color;\n\t};\n\tuniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];\n\tvoid getDirectionalLightInfo( const in DirectionalLight directionalLight, out IncidentLight light ) {\n\t\tlight.color = directionalLight.color;\n\t\tlight.direction = directionalLight.direction;\n\t\tlight.visible = true;\n\t}\n#endif\n#if NUM_POINT_LIGHTS > 0\n\tstruct PointLight {\n\t\tvec3 position;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t};\n\tuniform PointLight pointLights[ NUM_POINT_LIGHTS ];\n\tvoid getPointLightInfo( const in PointLight pointLight, const in vec3 geometryPosition, out IncidentLight light ) {\n\t\tvec3 lVector = pointLight.position - geometryPosition;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat lightDistance = length( lVector );\n\t\tlight.color = pointLight.color;\n\t\tlight.color *= getDistanceAttenuation( lightDistance, pointLight.distance, pointLight.decay );\n\t\tlight.visible = ( light.color != vec3( 0.0 ) );\n\t}\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\tstruct SpotLight {\n\t\tvec3 position;\n\t\tvec3 direction;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t\tfloat coneCos;\n\t\tfloat penumbraCos;\n\t};\n\tuniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];\n\tvoid getSpotLightInfo( const in SpotLight spotLight, const in vec3 geometryPosition, out IncidentLight light ) {\n\t\tvec3 lVector = spotLight.position - 
geometryPosition;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat angleCos = dot( light.direction, spotLight.direction );\n\t\tfloat spotAttenuation = getSpotAttenuation( spotLight.coneCos, spotLight.penumbraCos, angleCos );\n\t\tif ( spotAttenuation > 0.0 ) {\n\t\t\tfloat lightDistance = length( lVector );\n\t\t\tlight.color = spotLight.color * spotAttenuation;\n\t\t\tlight.color *= getDistanceAttenuation( lightDistance, spotLight.distance, spotLight.decay );\n\t\t\tlight.visible = ( light.color != vec3( 0.0 ) );\n\t\t} else {\n\t\t\tlight.color = vec3( 0.0 );\n\t\t\tlight.visible = false;\n\t\t}\n\t}\n#endif\n#if NUM_RECT_AREA_LIGHTS > 0\n\tstruct RectAreaLight {\n\t\tvec3 color;\n\t\tvec3 position;\n\t\tvec3 halfWidth;\n\t\tvec3 halfHeight;\n\t};\n\tuniform sampler2D ltc_1;\tuniform sampler2D ltc_2;\n\tuniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\tstruct HemisphereLight {\n\t\tvec3 direction;\n\t\tvec3 skyColor;\n\t\tvec3 groundColor;\n\t};\n\tuniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];\n\tvec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in vec3 normal ) {\n\t\tfloat dotNL = dot( normal, hemiLight.direction );\n\t\tfloat hemiDiffuseWeight = 0.5 * dotNL + 0.5;\n\t\tvec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );\n\t\treturn irradiance;\n\t}\n#endif"; var envmap_physical_pars_fragment = "#ifdef USE_ENVMAP\n\tvec3 getIBLIrradiance( const in vec3 normal ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * worldNormal, 1.0 );\n\t\t\treturn PI * envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\tvec3 getIBLRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness ) {\n\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\tvec3 reflectVec = reflect( - viewDir, normal );\n\t\t\treflectVec = normalize( mix( reflectVec, normal, roughness * roughness) );\n\t\t\treflectVec = inverseTransformDirection( reflectVec, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, envMapRotation * reflectVec, roughness );\n\t\t\treturn envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\t#ifdef USE_ANISOTROPY\n\t\tvec3 getIBLAnisotropyRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness, const in vec3 bitangent, const in float anisotropy ) {\n\t\t\t#ifdef ENVMAP_TYPE_CUBE_UV\n\t\t\t\tvec3 bentNormal = cross( bitangent, viewDir );\n\t\t\t\tbentNormal = normalize( cross( bentNormal, bitangent ) );\n\t\t\t\tbentNormal = normalize( mix( bentNormal, normal, pow2( pow2( 1.0 - anisotropy * ( 1.0 - roughness ) ) ) ) );\n\t\t\t\treturn getIBLRadiance( viewDir, bentNormal, roughness );\n\t\t\t#else\n\t\t\t\treturn vec3( 0.0 );\n\t\t\t#endif\n\t\t}\n\t#endif\n#endif"; var lights_toon_fragment = "ToonMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;"; var lights_toon_pars_fragment = "varying vec3 vViewPosition;\nstruct ToonMaterial {\n\tvec3 diffuseColor;\n};\nvoid RE_Direct_Toon( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\tvec3 irradiance = getGradientIrradiance( geometryNormal, directLight.direction ) * 
directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Toon( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_Toon\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Toon"; var lights_phong_fragment = "BlinnPhongMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;\nmaterial.specularColor = specular;\nmaterial.specularShininess = shininess;\nmaterial.specularStrength = specularStrength;"; var lights_phong_pars_fragment = "varying vec3 vViewPosition;\nstruct BlinnPhongMaterial {\n\tvec3 diffuseColor;\n\tvec3 specularColor;\n\tfloat specularShininess;\n\tfloat specularStrength;\n};\nvoid RE_Direct_BlinnPhong( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n\treflectedLight.directSpecular += irradiance * BRDF_BlinnPhong( directLight.direction, geometryViewDir, geometryNormal, material.specularColor, material.specularShininess ) * material.specularStrength;\n}\nvoid RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_BlinnPhong\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_BlinnPhong"; var lights_physical_fragment = "PhysicalMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );\nvec3 dxy = max( abs( dFdx( nonPerturbedNormal ) ), abs( dFdy( nonPerturbedNormal ) ) );\nfloat geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );\nmaterial.roughness = max( roughnessFactor, 0.0525 );material.roughness += geometryRoughness;\nmaterial.roughness = min( material.roughness, 1.0 );\n#ifdef IOR\n\tmaterial.ior = ior;\n\t#ifdef USE_SPECULAR\n\t\tfloat specularIntensityFactor = specularIntensity;\n\t\tvec3 specularColorFactor = specularColor;\n\t\t#ifdef USE_SPECULAR_COLORMAP\n\t\t\tspecularColorFactor *= texture2D( specularColorMap, vSpecularColorMapUv ).rgb;\n\t\t#endif\n\t\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\t\tspecularIntensityFactor *= texture2D( specularIntensityMap, vSpecularIntensityMapUv ).a;\n\t\t#endif\n\t\tmaterial.specularF90 = mix( specularIntensityFactor, 1.0, metalnessFactor );\n\t#else\n\t\tfloat specularIntensityFactor = 1.0;\n\t\tvec3 specularColorFactor = vec3( 1.0 );\n\t\tmaterial.specularF90 = 1.0;\n\t#endif\n\tmaterial.specularColor = mix( min( pow2( ( material.ior - 1.0 ) / ( material.ior + 1.0 ) ) * specularColorFactor, vec3( 1.0 ) ) * specularIntensityFactor, diffuseColor.rgb, metalnessFactor );\n#else\n\tmaterial.specularColor = mix( vec3( 0.04 ), diffuseColor.rgb, metalnessFactor );\n\tmaterial.specularF90 = 
1.0;\n#endif\n#ifdef USE_CLEARCOAT\n\tmaterial.clearcoat = clearcoat;\n\tmaterial.clearcoatRoughness = clearcoatRoughness;\n\tmaterial.clearcoatF0 = vec3( 0.04 );\n\tmaterial.clearcoatF90 = 1.0;\n\t#ifdef USE_CLEARCOATMAP\n\t\tmaterial.clearcoat *= texture2D( clearcoatMap, vClearcoatMapUv ).x;\n\t#endif\n\t#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\t\tmaterial.clearcoatRoughness *= texture2D( clearcoatRoughnessMap, vClearcoatRoughnessMapUv ).y;\n\t#endif\n\tmaterial.clearcoat = saturate( material.clearcoat );\tmaterial.clearcoatRoughness = max( material.clearcoatRoughness, 0.0525 );\n\tmaterial.clearcoatRoughness += geometryRoughness;\n\tmaterial.clearcoatRoughness = min( material.clearcoatRoughness, 1.0 );\n#endif\n#ifdef USE_DISPERSION\n\tmaterial.dispersion = dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tmaterial.iridescence = iridescence;\n\tmaterial.iridescenceIOR = iridescenceIOR;\n\t#ifdef USE_IRIDESCENCEMAP\n\t\tmaterial.iridescence *= texture2D( iridescenceMap, vIridescenceMapUv ).r;\n\t#endif\n\t#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\t\tmaterial.iridescenceThickness = (iridescenceThicknessMaximum - iridescenceThicknessMinimum) * texture2D( iridescenceThicknessMap, vIridescenceThicknessMapUv ).g + iridescenceThicknessMinimum;\n\t#else\n\t\tmaterial.iridescenceThickness = iridescenceThicknessMaximum;\n\t#endif\n#endif\n#ifdef USE_SHEEN\n\tmaterial.sheenColor = sheenColor;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tmaterial.sheenColor *= texture2D( sheenColorMap, vSheenColorMapUv ).rgb;\n\t#endif\n\tmaterial.sheenRoughness = clamp( sheenRoughness, 0.07, 1.0 );\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tmaterial.sheenRoughness *= texture2D( sheenRoughnessMap, vSheenRoughnessMapUv ).a;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\t#ifdef USE_ANISOTROPYMAP\n\t\tmat2 anisotropyMat = mat2( anisotropyVector.x, anisotropyVector.y, - anisotropyVector.y, anisotropyVector.x );\n\t\tvec3 anisotropyPolar = texture2D( anisotropyMap, vAnisotropyMapUv ).rgb;\n\t\tvec2 anisotropyV = anisotropyMat * normalize( 2.0 * anisotropyPolar.rg - vec2( 1.0 ) ) * anisotropyPolar.b;\n\t#else\n\t\tvec2 anisotropyV = anisotropyVector;\n\t#endif\n\tmaterial.anisotropy = length( anisotropyV );\n\tif( material.anisotropy == 0.0 ) {\n\t\tanisotropyV = vec2( 1.0, 0.0 );\n\t} else {\n\t\tanisotropyV /= material.anisotropy;\n\t\tmaterial.anisotropy = saturate( material.anisotropy );\n\t}\n\tmaterial.alphaT = mix( pow2( material.roughness ), 1.0, pow2( material.anisotropy ) );\n\tmaterial.anisotropyT = tbn[ 0 ] * anisotropyV.x + tbn[ 1 ] * anisotropyV.y;\n\tmaterial.anisotropyB = tbn[ 1 ] * anisotropyV.x - tbn[ 0 ] * anisotropyV.y;\n#endif"; var lights_physical_pars_fragment = "struct PhysicalMaterial {\n\tvec3 diffuseColor;\n\tfloat roughness;\n\tvec3 specularColor;\n\tfloat specularF90;\n\tfloat dispersion;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat clearcoat;\n\t\tfloat clearcoatRoughness;\n\t\tvec3 clearcoatF0;\n\t\tfloat clearcoatF90;\n\t#endif\n\t#ifdef USE_IRIDESCENCE\n\t\tfloat iridescence;\n\t\tfloat iridescenceIOR;\n\t\tfloat iridescenceThickness;\n\t\tvec3 iridescenceFresnel;\n\t\tvec3 iridescenceF0;\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tvec3 sheenColor;\n\t\tfloat sheenRoughness;\n\t#endif\n\t#ifdef IOR\n\t\tfloat ior;\n\t#endif\n\t#ifdef USE_TRANSMISSION\n\t\tfloat transmission;\n\t\tfloat transmissionAlpha;\n\t\tfloat thickness;\n\t\tfloat attenuationDistance;\n\t\tvec3 attenuationColor;\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat anisotropy;\n\t\tfloat alphaT;\n\t\tvec3 anisotropyT;\n\t\tvec3 anisotropyB;\n\t#endif\n};\nvec3 
clearcoatSpecularDirect = vec3( 0.0 );\nvec3 clearcoatSpecularIndirect = vec3( 0.0 );\nvec3 sheenSpecularDirect = vec3( 0.0 );\nvec3 sheenSpecularIndirect = vec3(0.0 );\nvec3 Schlick_to_F0( const in vec3 f, const in float f90, const in float dotVH ) {\n float x = clamp( 1.0 - dotVH, 0.0, 1.0 );\n float x2 = x * x;\n float x5 = clamp( x * x2 * x2, 0.0, 0.9999 );\n return ( f - vec3( f90 ) * x5 ) / ( 1.0 - x5 );\n}\nfloat V_GGX_SmithCorrelated( const in float alpha, const in float dotNL, const in float dotNV ) {\n\tfloat a2 = pow2( alpha );\n\tfloat gv = dotNL * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNV ) );\n\tfloat gl = dotNV * sqrt( a2 + ( 1.0 - a2 ) * pow2( dotNL ) );\n\treturn 0.5 / max( gv + gl, EPSILON );\n}\nfloat D_GGX( const in float alpha, const in float dotNH ) {\n\tfloat a2 = pow2( alpha );\n\tfloat denom = pow2( dotNH ) * ( a2 - 1.0 ) + 1.0;\n\treturn RECIPROCAL_PI * a2 / pow2( denom );\n}\n#ifdef USE_ANISOTROPY\n\tfloat V_GGX_SmithCorrelated_Anisotropic( const in float alphaT, const in float alphaB, const in float dotTV, const in float dotBV, const in float dotTL, const in float dotBL, const in float dotNV, const in float dotNL ) {\n\t\tfloat gv = dotNL * length( vec3( alphaT * dotTV, alphaB * dotBV, dotNV ) );\n\t\tfloat gl = dotNV * length( vec3( alphaT * dotTL, alphaB * dotBL, dotNL ) );\n\t\tfloat v = 0.5 / ( gv + gl );\n\t\treturn saturate(v);\n\t}\n\tfloat D_GGX_Anisotropic( const in float alphaT, const in float alphaB, const in float dotNH, const in float dotTH, const in float dotBH ) {\n\t\tfloat a2 = alphaT * alphaB;\n\t\thighp vec3 v = vec3( alphaB * dotTH, alphaT * dotBH, a2 * dotNH );\n\t\thighp float v2 = dot( v, v );\n\t\tfloat w2 = a2 / v2;\n\t\treturn RECIPROCAL_PI * a2 * pow2 ( w2 );\n\t}\n#endif\n#ifdef USE_CLEARCOAT\n\tvec3 BRDF_GGX_Clearcoat( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material) {\n\t\tvec3 f0 = material.clearcoatF0;\n\t\tfloat f90 = material.clearcoatF90;\n\t\tfloat roughness = material.clearcoatRoughness;\n\t\tfloat alpha = pow2( roughness );\n\t\tvec3 halfDir = normalize( lightDir + viewDir );\n\t\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\t\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\t\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\t\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\t\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t\treturn F * ( V * D );\n\t}\n#endif\nvec3 BRDF_GGX( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, const in PhysicalMaterial material ) {\n\tvec3 f0 = material.specularColor;\n\tfloat f90 = material.specularF90;\n\tfloat roughness = material.roughness;\n\tfloat alpha = pow2( roughness );\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat dotVH = saturate( dot( viewDir, halfDir ) );\n\tvec3 F = F_Schlick( f0, f90, dotVH );\n\t#ifdef USE_IRIDESCENCE\n\t\tF = mix( F, material.iridescenceFresnel, material.iridescence );\n\t#endif\n\t#ifdef USE_ANISOTROPY\n\t\tfloat dotTL = dot( material.anisotropyT, lightDir );\n\t\tfloat dotTV = dot( material.anisotropyT, viewDir );\n\t\tfloat dotTH = dot( material.anisotropyT, halfDir );\n\t\tfloat dotBL = dot( material.anisotropyB, lightDir );\n\t\tfloat dotBV = dot( material.anisotropyB, viewDir );\n\t\tfloat dotBH = 
dot( material.anisotropyB, halfDir );\n\t\tfloat V = V_GGX_SmithCorrelated_Anisotropic( material.alphaT, alpha, dotTV, dotBV, dotTL, dotBL, dotNV, dotNL );\n\t\tfloat D = D_GGX_Anisotropic( material.alphaT, alpha, dotNH, dotTH, dotBH );\n\t#else\n\t\tfloat V = V_GGX_SmithCorrelated( alpha, dotNL, dotNV );\n\t\tfloat D = D_GGX( alpha, dotNH );\n\t#endif\n\treturn F * ( V * D );\n}\nvec2 LTC_Uv( const in vec3 N, const in vec3 V, const in float roughness ) {\n\tconst float LUT_SIZE = 64.0;\n\tconst float LUT_SCALE = ( LUT_SIZE - 1.0 ) / LUT_SIZE;\n\tconst float LUT_BIAS = 0.5 / LUT_SIZE;\n\tfloat dotNV = saturate( dot( N, V ) );\n\tvec2 uv = vec2( roughness, sqrt( 1.0 - dotNV ) );\n\tuv = uv * LUT_SCALE + LUT_BIAS;\n\treturn uv;\n}\nfloat LTC_ClippedSphereFormFactor( const in vec3 f ) {\n\tfloat l = length( f );\n\treturn max( ( l * l + f.z ) / ( l + 1.0 ), 0.0 );\n}\nvec3 LTC_EdgeVectorFormFactor( const in vec3 v1, const in vec3 v2 ) {\n\tfloat x = dot( v1, v2 );\n\tfloat y = abs( x );\n\tfloat a = 0.8543985 + ( 0.4965155 + 0.0145206 * y ) * y;\n\tfloat b = 3.4175940 + ( 4.1616724 + y ) * y;\n\tfloat v = a / b;\n\tfloat theta_sintheta = ( x > 0.0 ) ? v : 0.5 * inversesqrt( max( 1.0 - x * x, 1e-7 ) ) - v;\n\treturn cross( v1, v2 ) * theta_sintheta;\n}\nvec3 LTC_Evaluate( const in vec3 N, const in vec3 V, const in vec3 P, const in mat3 mInv, const in vec3 rectCoords[ 4 ] ) {\n\tvec3 v1 = rectCoords[ 1 ] - rectCoords[ 0 ];\n\tvec3 v2 = rectCoords[ 3 ] - rectCoords[ 0 ];\n\tvec3 lightNormal = cross( v1, v2 );\n\tif( dot( lightNormal, P - rectCoords[ 0 ] ) < 0.0 ) return vec3( 0.0 );\n\tvec3 T1, T2;\n\tT1 = normalize( V - N * dot( V, N ) );\n\tT2 = - cross( N, T1 );\n\tmat3 mat = mInv * transposeMat3( mat3( T1, T2, N ) );\n\tvec3 coords[ 4 ];\n\tcoords[ 0 ] = mat * ( rectCoords[ 0 ] - P );\n\tcoords[ 1 ] = mat * ( rectCoords[ 1 ] - P );\n\tcoords[ 2 ] = mat * ( rectCoords[ 2 ] - P );\n\tcoords[ 3 ] = mat * ( rectCoords[ 3 ] - P );\n\tcoords[ 0 ] = normalize( coords[ 0 ] );\n\tcoords[ 1 ] = normalize( coords[ 1 ] );\n\tcoords[ 2 ] = normalize( coords[ 2 ] );\n\tcoords[ 3 ] = normalize( coords[ 3 ] );\n\tvec3 vectorFormFactor = vec3( 0.0 );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 0 ], coords[ 1 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 1 ], coords[ 2 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 2 ], coords[ 3 ] );\n\tvectorFormFactor += LTC_EdgeVectorFormFactor( coords[ 3 ], coords[ 0 ] );\n\tfloat result = LTC_ClippedSphereFormFactor( vectorFormFactor );\n\treturn vec3( result );\n}\n#if defined( USE_SHEEN )\nfloat D_Charlie( float roughness, float dotNH ) {\n\tfloat alpha = pow2( roughness );\n\tfloat invAlpha = 1.0 / alpha;\n\tfloat cos2h = dotNH * dotNH;\n\tfloat sin2h = max( 1.0 - cos2h, 0.0078125 );\n\treturn ( 2.0 + invAlpha ) * pow( sin2h, invAlpha * 0.5 ) / ( 2.0 * PI );\n}\nfloat V_Neubelt( float dotNV, float dotNL ) {\n\treturn saturate( 1.0 / ( 4.0 * ( dotNL + dotNV - dotNL * dotNV ) ) );\n}\nvec3 BRDF_Sheen( const in vec3 lightDir, const in vec3 viewDir, const in vec3 normal, vec3 sheenColor, const in float sheenRoughness ) {\n\tvec3 halfDir = normalize( lightDir + viewDir );\n\tfloat dotNL = saturate( dot( normal, lightDir ) );\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat dotNH = saturate( dot( normal, halfDir ) );\n\tfloat D = D_Charlie( sheenRoughness, dotNH );\n\tfloat V = V_Neubelt( dotNV, dotNL );\n\treturn sheenColor * ( D * V );\n}\n#endif\nfloat IBLSheenBRDF( const in vec3 normal, const in vec3 viewDir, 
const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat r2 = roughness * roughness;\n\tfloat a = roughness < 0.25 ? -339.2 * r2 + 161.4 * roughness - 25.9 : -8.48 * r2 + 14.3 * roughness - 9.95;\n\tfloat b = roughness < 0.25 ? 44.0 * r2 - 23.7 * roughness + 3.26 : 1.97 * r2 - 3.27 * roughness + 0.72;\n\tfloat DG = exp( a * dotNV + b ) + ( roughness < 0.25 ? 0.0 : 0.1 * ( roughness - 0.25 ) );\n\treturn saturate( DG * RECIPROCAL_PI );\n}\nvec2 DFGApprox( const in vec3 normal, const in vec3 viewDir, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tconst vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );\n\tconst vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );\n\tvec4 r = roughness * c0 + c1;\n\tfloat a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;\n\tvec2 fab = vec2( - 1.04, 1.04 ) * a004 + r.zw;\n\treturn fab;\n}\nvec3 EnvironmentBRDF( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness ) {\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\treturn specularColor * fab.x + specularF90 * fab.y;\n}\n#ifdef USE_IRIDESCENCE\nvoid computeMultiscatteringIridescence( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float iridescence, const in vec3 iridescenceF0, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#else\nvoid computeMultiscattering( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n#endif\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\t#ifdef USE_IRIDESCENCE\n\t\tvec3 Fr = mix( specularColor, iridescenceF0, iridescence );\n\t#else\n\t\tvec3 Fr = specularColor;\n\t#endif\n\tvec3 FssEss = Fr * fab.x + specularF90 * fab.y;\n\tfloat Ess = fab.x + fab.y;\n\tfloat Ems = 1.0 - Ess;\n\tvec3 Favg = Fr + ( 1.0 - Fr ) * 0.047619;\tvec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );\n\tsingleScatter += FssEss;\n\tmultiScatter += Fms * Ems;\n}\n#if NUM_RECT_AREA_LIGHTS > 0\n\tvoid RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\t\tvec3 normal = geometryNormal;\n\t\tvec3 viewDir = geometryViewDir;\n\t\tvec3 position = geometryPosition;\n\t\tvec3 lightPos = rectAreaLight.position;\n\t\tvec3 halfWidth = rectAreaLight.halfWidth;\n\t\tvec3 halfHeight = rectAreaLight.halfHeight;\n\t\tvec3 lightColor = rectAreaLight.color;\n\t\tfloat roughness = material.roughness;\n\t\tvec3 rectCoords[ 4 ];\n\t\trectCoords[ 0 ] = lightPos + halfWidth - halfHeight;\t\trectCoords[ 1 ] = lightPos - halfWidth - halfHeight;\n\t\trectCoords[ 2 ] = lightPos - halfWidth + halfHeight;\n\t\trectCoords[ 3 ] = lightPos + halfWidth + halfHeight;\n\t\tvec2 uv = LTC_Uv( normal, viewDir, roughness );\n\t\tvec4 t1 = texture2D( ltc_1, uv );\n\t\tvec4 t2 = texture2D( ltc_2, uv );\n\t\tmat3 mInv = mat3(\n\t\t\tvec3( t1.x, 0, t1.y ),\n\t\t\tvec3( 0, 1, 0 ),\n\t\t\tvec3( t1.z, 0, t1.w )\n\t\t);\n\t\tvec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );\n\t\treflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords 
);\n\t\treflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );\n\t}\n#endif\nvoid RE_Direct_Physical( const in IncidentLight directLight, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometryNormal, directLight.direction ) );\n\tvec3 irradiance = dotNL * directLight.color;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNLcc = saturate( dot( geometryClearcoatNormal, directLight.direction ) );\n\t\tvec3 ccIrradiance = dotNLcc * directLight.color;\n\t\tclearcoatSpecularDirect += ccIrradiance * BRDF_GGX_Clearcoat( directLight.direction, geometryViewDir, geometryClearcoatNormal, material );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecularDirect += irradiance * BRDF_Sheen( directLight.direction, geometryViewDir, geometryNormal, material.sheenColor, material.sheenRoughness );\n\t#endif\n\treflectedLight.directSpecular += irradiance * BRDF_GGX( directLight.direction, geometryViewDir, geometryNormal, material );\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 irradiance, const in vec3 clearcoatRadiance, const in vec3 geometryPosition, const in vec3 geometryNormal, const in vec3 geometryViewDir, const in vec3 geometryClearcoatNormal, const in PhysicalMaterial material, inout ReflectedLight reflectedLight) {\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatSpecularIndirect += clearcoatRadiance * EnvironmentBRDF( geometryClearcoatNormal, geometryViewDir, material.clearcoatF0, material.clearcoatF90, material.clearcoatRoughness );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecularIndirect += irradiance * material.sheenColor * IBLSheenBRDF( geometryNormal, geometryViewDir, material.sheenRoughness );\n\t#endif\n\tvec3 singleScattering = vec3( 0.0 );\n\tvec3 multiScattering = vec3( 0.0 );\n\tvec3 cosineWeightedIrradiance = irradiance * RECIPROCAL_PI;\n\t#ifdef USE_IRIDESCENCE\n\t\tcomputeMultiscatteringIridescence( geometryNormal, geometryViewDir, material.specularColor, material.specularF90, material.iridescence, material.iridescenceFresnel, material.roughness, singleScattering, multiScattering );\n\t#else\n\t\tcomputeMultiscattering( geometryNormal, geometryViewDir, material.specularColor, material.specularF90, material.roughness, singleScattering, multiScattering );\n\t#endif\n\tvec3 totalScattering = singleScattering + multiScattering;\n\tvec3 diffuse = material.diffuseColor * ( 1.0 - max( max( totalScattering.r, totalScattering.g ), totalScattering.b ) );\n\treflectedLight.indirectSpecular += radiance * singleScattering;\n\treflectedLight.indirectSpecular += multiScattering * cosineWeightedIrradiance;\n\treflectedLight.indirectDiffuse += diffuse * cosineWeightedIrradiance;\n}\n#define RE_Direct\t\t\t\tRE_Direct_Physical\n#define RE_Direct_RectArea\t\tRE_Direct_RectArea_Physical\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Physical\n#define 
RE_IndirectSpecular\t\tRE_IndirectSpecular_Physical\nfloat computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {\n\treturn saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );\n}"; var lights_fragment_begin = "\nvec3 geometryPosition = - vViewPosition;\nvec3 geometryNormal = normal;\nvec3 geometryViewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( vViewPosition );\nvec3 geometryClearcoatNormal = vec3( 0.0 );\n#ifdef USE_CLEARCOAT\n\tgeometryClearcoatNormal = clearcoatNormal;\n#endif\n#ifdef USE_IRIDESCENCE\n\tfloat dotNVi = saturate( dot( normal, geometryViewDir ) );\n\tif ( material.iridescenceThickness == 0.0 ) {\n\t\tmaterial.iridescence = 0.0;\n\t} else {\n\t\tmaterial.iridescence = saturate( material.iridescence );\n\t}\n\tif ( material.iridescence > 0.0 ) {\n\t\tmaterial.iridescenceFresnel = evalIridescence( 1.0, material.iridescenceIOR, dotNVi, material.iridescenceThickness, material.specularColor );\n\t\tmaterial.iridescenceF0 = Schlick_to_F0( material.iridescenceFresnel, 1.0, dotNVi );\n\t}\n#endif\nIncidentLight directLight;\n#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )\n\tPointLight pointLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tpointLight = pointLights[ i ];\n\t\tgetPointLightInfo( pointLight, geometryPosition, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS )\n\t\tpointLightShadow = pointLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowIntensity, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )\n\tSpotLight spotLight;\n\tvec4 spotColor;\n\tvec3 spotLightCoord;\n\tbool inSpotLightMap;\n\t#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tspotLight = spotLights[ i ];\n\t\tgetSpotLightInfo( spotLight, geometryPosition, directLight );\n\t\t#if ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#define SPOT_LIGHT_MAP_INDEX UNROLLED_LOOP_INDEX\n\t\t#elif ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\t#define SPOT_LIGHT_MAP_INDEX NUM_SPOT_LIGHT_MAPS\n\t\t#else\n\t\t#define SPOT_LIGHT_MAP_INDEX ( UNROLLED_LOOP_INDEX - NUM_SPOT_LIGHT_SHADOWS + NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS )\n\t\t#endif\n\t\t#if ( SPOT_LIGHT_MAP_INDEX < NUM_SPOT_LIGHT_MAPS )\n\t\t\tspotLightCoord = vSpotLightCoord[ i ].xyz / vSpotLightCoord[ i ].w;\n\t\t\tinSpotLightMap = all( lessThan( abs( spotLightCoord * 2. - 1. ), vec3( 1.0 ) ) );\n\t\t\tspotColor = texture2D( spotLightMap[ SPOT_LIGHT_MAP_INDEX ], spotLightCoord.xy );\n\t\t\tdirectLight.color = inSpotLightMap ? 
directLight.color * spotColor.rgb : directLight.color;\n\t\t#endif\n\t\t#undef SPOT_LIGHT_MAP_INDEX\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\tspotLightShadow = spotLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowIntensity, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )\n\tDirectionalLight directionalLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tdirectionalLight = directionalLights[ i ];\n\t\tgetDirectionalLightInfo( directionalLight, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )\n\t\tdirectionalLightShadow = directionalLightShadows[ i ];\n\t\tdirectLight.color *= ( directLight.visible && receiveShadow ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowIntensity, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )\n\tRectAreaLight rectAreaLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {\n\t\trectAreaLight = rectAreaLights[ i ];\n\t\tRE_Direct_RectArea( rectAreaLight, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if defined( RE_IndirectDiffuse )\n\tvec3 iblIrradiance = vec3( 0.0 );\n\tvec3 irradiance = getAmbientLightIrradiance( ambientLightColor );\n\t#if defined( USE_LIGHT_PROBES )\n\t\tirradiance += getLightProbeIrradiance( lightProbe, geometryNormal );\n\t#endif\n\t#if ( NUM_HEMI_LIGHTS > 0 )\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\t\tirradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometryNormal );\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if defined( RE_IndirectSpecular )\n\tvec3 radiance = vec3( 0.0 );\n\tvec3 clearcoatRadiance = vec3( 0.0 );\n#endif"; var lights_fragment_maps = "#if defined( RE_IndirectDiffuse )\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel = texture2D( lightMap, vLightMapUv );\n\t\tvec3 lightMapIrradiance = lightMapTexel.rgb * lightMapIntensity;\n\t\tirradiance += lightMapIrradiance;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( STANDARD ) && defined( ENVMAP_TYPE_CUBE_UV )\n\t\tiblIrradiance += getIBLIrradiance( geometryNormal );\n\t#endif\n#endif\n#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )\n\t#ifdef USE_ANISOTROPY\n\t\tradiance += getIBLAnisotropyRadiance( geometryViewDir, geometryNormal, material.roughness, material.anisotropyB, material.anisotropy );\n\t#else\n\t\tradiance += getIBLRadiance( geometryViewDir, geometryNormal, material.roughness );\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatRadiance += 
getIBLRadiance( geometryViewDir, geometryClearcoatNormal, material.clearcoatRoughness );\n\t#endif\n#endif"; var lights_fragment_end = "#if defined( RE_IndirectDiffuse )\n\tRE_IndirectDiffuse( irradiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif\n#if defined( RE_IndirectSpecular )\n\tRE_IndirectSpecular( radiance, iblIrradiance, clearcoatRadiance, geometryPosition, geometryNormal, geometryViewDir, geometryClearcoatNormal, material, reflectedLight );\n#endif"; var logdepthbuf_fragment = "#if defined( USE_LOGDEPTHBUF )\n\tgl_FragDepth = vIsPerspective == 0.0 ? gl_FragCoord.z : log2( vFragDepth ) * logDepthBufFC * 0.5;\n#endif"; var logdepthbuf_pars_fragment = "#if defined( USE_LOGDEPTHBUF )\n\tuniform float logDepthBufFC;\n\tvarying float vFragDepth;\n\tvarying float vIsPerspective;\n#endif"; var logdepthbuf_pars_vertex = "#ifdef USE_LOGDEPTHBUF\n\tvarying float vFragDepth;\n\tvarying float vIsPerspective;\n#endif"; var logdepthbuf_vertex = "#ifdef USE_LOGDEPTHBUF\n\tvFragDepth = 1.0 + gl_Position.w;\n\tvIsPerspective = float( isPerspectiveMatrix( projectionMatrix ) );\n#endif"; var map_fragment = "#ifdef USE_MAP\n\tvec4 sampledDiffuseColor = texture2D( map, vMapUv );\n\t#ifdef DECODE_VIDEO_TEXTURE\n\t\tsampledDiffuseColor = sRGBTransferEOTF( sampledDiffuseColor );\n\t#endif\n\tdiffuseColor *= sampledDiffuseColor;\n#endif"; var map_pars_fragment = "#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif"; var map_particle_fragment = "#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\t#if defined( USE_POINTS_UV )\n\t\tvec2 uv = vUv;\n\t#else\n\t\tvec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;\n\t#endif\n#endif\n#ifdef USE_MAP\n\tdiffuseColor *= texture2D( map, uv );\n#endif\n#ifdef USE_ALPHAMAP\n\tdiffuseColor.a *= texture2D( alphaMap, uv ).g;\n#endif"; var map_particle_pars_fragment = "#if defined( USE_POINTS_UV )\n\tvarying vec2 vUv;\n#else\n\t#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\t\tuniform mat3 uvTransform;\n\t#endif\n#endif\n#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif\n#ifdef USE_ALPHAMAP\n\tuniform sampler2D alphaMap;\n#endif"; var metalnessmap_fragment = "float metalnessFactor = metalness;\n#ifdef USE_METALNESSMAP\n\tvec4 texelMetalness = texture2D( metalnessMap, vMetalnessMapUv );\n\tmetalnessFactor *= texelMetalness.b;\n#endif"; var metalnessmap_pars_fragment = "#ifdef USE_METALNESSMAP\n\tuniform sampler2D metalnessMap;\n#endif"; var morphinstance_vertex = "#ifdef USE_INSTANCING_MORPH\n\tfloat morphTargetInfluences[ MORPHTARGETS_COUNT ];\n\tfloat morphTargetBaseInfluence = texelFetch( morphTexture, ivec2( 0, gl_InstanceID ), 0 ).r;\n\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\tmorphTargetInfluences[i] = texelFetch( morphTexture, ivec2( i + 1, gl_InstanceID ), 0 ).r;\n\t}\n#endif"; var morphcolor_vertex = "#if defined( USE_MORPHCOLORS )\n\tvColor *= morphTargetBaseInfluence;\n\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\t#if defined( USE_COLOR_ALPHA )\n\t\t\tif ( morphTargetInfluences[ i ] != 0.0 ) vColor += getMorph( gl_VertexID, i, 2 ) * morphTargetInfluences[ i ];\n\t\t#elif defined( USE_COLOR )\n\t\t\tif ( morphTargetInfluences[ i ] != 0.0 ) vColor += getMorph( gl_VertexID, i, 2 ).rgb * morphTargetInfluences[ i ];\n\t\t#endif\n\t}\n#endif"; var morphnormal_vertex = "#ifdef USE_MORPHNORMALS\n\tobjectNormal *= morphTargetBaseInfluence;\n\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\tif ( morphTargetInfluences[ i ] != 0.0 ) 
objectNormal += getMorph( gl_VertexID, i, 1 ).xyz * morphTargetInfluences[ i ];\n\t}\n#endif"; var morphtarget_pars_vertex = "#ifdef USE_MORPHTARGETS\n\t#ifndef USE_INSTANCING_MORPH\n\t\tuniform float morphTargetBaseInfluence;\n\t\tuniform float morphTargetInfluences[ MORPHTARGETS_COUNT ];\n\t#endif\n\tuniform sampler2DArray morphTargetsTexture;\n\tuniform ivec2 morphTargetsTextureSize;\n\tvec4 getMorph( const in int vertexIndex, const in int morphTargetIndex, const in int offset ) {\n\t\tint texelIndex = vertexIndex * MORPHTARGETS_TEXTURE_STRIDE + offset;\n\t\tint y = texelIndex / morphTargetsTextureSize.x;\n\t\tint x = texelIndex - y * morphTargetsTextureSize.x;\n\t\tivec3 morphUV = ivec3( x, y, morphTargetIndex );\n\t\treturn texelFetch( morphTargetsTexture, morphUV, 0 );\n\t}\n#endif"; var morphtarget_vertex = "#ifdef USE_MORPHTARGETS\n\ttransformed *= morphTargetBaseInfluence;\n\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\tif ( morphTargetInfluences[ i ] != 0.0 ) transformed += getMorph( gl_VertexID, i, 0 ).xyz * morphTargetInfluences[ i ];\n\t}\n#endif"; var normal_fragment_begin = "float faceDirection = gl_FrontFacing ? 1.0 : - 1.0;\n#ifdef FLAT_SHADED\n\tvec3 fdx = dFdx( vViewPosition );\n\tvec3 fdy = dFdy( vViewPosition );\n\tvec3 normal = normalize( cross( fdx, fdy ) );\n#else\n\tvec3 normal = normalize( vNormal );\n\t#ifdef DOUBLE_SIDED\n\t\tnormal *= faceDirection;\n\t#endif\n#endif\n#if defined( USE_NORMALMAP_TANGENTSPACE ) || defined( USE_CLEARCOAT_NORMALMAP ) || defined( USE_ANISOTROPY )\n\t#ifdef USE_TANGENT\n\t\tmat3 tbn = mat3( normalize( vTangent ), normalize( vBitangent ), normal );\n\t#else\n\t\tmat3 tbn = getTangentFrame( - vViewPosition, normal,\n\t\t#if defined( USE_NORMALMAP )\n\t\t\tvNormalMapUv\n\t\t#elif defined( USE_CLEARCOAT_NORMALMAP )\n\t\t\tvClearcoatNormalMapUv\n\t\t#else\n\t\t\tvUv\n\t\t#endif\n\t\t);\n\t#endif\n\t#if defined( DOUBLE_SIDED ) && ! defined( FLAT_SHADED )\n\t\ttbn[0] *= faceDirection;\n\t\ttbn[1] *= faceDirection;\n\t#endif\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\t#ifdef USE_TANGENT\n\t\tmat3 tbn2 = mat3( normalize( vTangent ), normalize( vBitangent ), normal );\n\t#else\n\t\tmat3 tbn2 = getTangentFrame( - vViewPosition, normal, vClearcoatNormalMapUv );\n\t#endif\n\t#if defined( DOUBLE_SIDED ) && ! 
defined( FLAT_SHADED )\n\t\ttbn2[0] *= faceDirection;\n\t\ttbn2[1] *= faceDirection;\n\t#endif\n#endif\nvec3 nonPerturbedNormal = normal;"; var normal_fragment_maps = "#ifdef USE_NORMALMAP_OBJECTSPACE\n\tnormal = texture2D( normalMap, vNormalMapUv ).xyz * 2.0 - 1.0;\n\t#ifdef FLIP_SIDED\n\t\tnormal = - normal;\n\t#endif\n\t#ifdef DOUBLE_SIDED\n\t\tnormal = normal * faceDirection;\n\t#endif\n\tnormal = normalize( normalMatrix * normal );\n#elif defined( USE_NORMALMAP_TANGENTSPACE )\n\tvec3 mapN = texture2D( normalMap, vNormalMapUv ).xyz * 2.0 - 1.0;\n\tmapN.xy *= normalScale;\n\tnormal = normalize( tbn * mapN );\n#elif defined( USE_BUMPMAP )\n\tnormal = perturbNormalArb( - vViewPosition, normal, dHdxy_fwd(), faceDirection );\n#endif"; var normal_pars_fragment = "#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif"; var normal_pars_vertex = "#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif"; var normal_vertex = "#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n\t#ifdef USE_TANGENT\n\t\tvTangent = normalize( transformedTangent );\n\t\tvBitangent = normalize( cross( vNormal, vTangent ) * tangent.w );\n\t#endif\n#endif"; var normalmap_pars_fragment = "#ifdef USE_NORMALMAP\n\tuniform sampler2D normalMap;\n\tuniform vec2 normalScale;\n#endif\n#ifdef USE_NORMALMAP_OBJECTSPACE\n\tuniform mat3 normalMatrix;\n#endif\n#if ! defined ( USE_TANGENT ) && ( defined ( USE_NORMALMAP_TANGENTSPACE ) || defined ( USE_CLEARCOAT_NORMALMAP ) || defined( USE_ANISOTROPY ) )\n\tmat3 getTangentFrame( vec3 eye_pos, vec3 surf_norm, vec2 uv ) {\n\t\tvec3 q0 = dFdx( eye_pos.xyz );\n\t\tvec3 q1 = dFdy( eye_pos.xyz );\n\t\tvec2 st0 = dFdx( uv.st );\n\t\tvec2 st1 = dFdy( uv.st );\n\t\tvec3 N = surf_norm;\n\t\tvec3 q1perp = cross( q1, N );\n\t\tvec3 q0perp = cross( N, q0 );\n\t\tvec3 T = q1perp * st0.x + q0perp * st1.x;\n\t\tvec3 B = q1perp * st0.y + q0perp * st1.y;\n\t\tfloat det = max( dot( T, T ), dot( B, B ) );\n\t\tfloat scale = ( det == 0.0 ) ? 
0.0 : inversesqrt( det );\n\t\treturn mat3( T * scale, B * scale, N );\n\t}\n#endif"; var clearcoat_normal_fragment_begin = "#ifdef USE_CLEARCOAT\n\tvec3 clearcoatNormal = nonPerturbedNormal;\n#endif"; var clearcoat_normal_fragment_maps = "#ifdef USE_CLEARCOAT_NORMALMAP\n\tvec3 clearcoatMapN = texture2D( clearcoatNormalMap, vClearcoatNormalMapUv ).xyz * 2.0 - 1.0;\n\tclearcoatMapN.xy *= clearcoatNormalScale;\n\tclearcoatNormal = normalize( tbn2 * clearcoatMapN );\n#endif"; var clearcoat_pars_fragment = "#ifdef USE_CLEARCOATMAP\n\tuniform sampler2D clearcoatMap;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tuniform sampler2D clearcoatNormalMap;\n\tuniform vec2 clearcoatNormalScale;\n#endif\n#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\tuniform sampler2D clearcoatRoughnessMap;\n#endif"; var iridescence_pars_fragment = "#ifdef USE_IRIDESCENCEMAP\n\tuniform sampler2D iridescenceMap;\n#endif\n#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\tuniform sampler2D iridescenceThicknessMap;\n#endif"; var opaque_fragment = "#ifdef OPAQUE\ndiffuseColor.a = 1.0;\n#endif\n#ifdef USE_TRANSMISSION\ndiffuseColor.a *= material.transmissionAlpha;\n#endif\ngl_FragColor = vec4( outgoingLight, diffuseColor.a );"; var packing = "vec3 packNormalToRGB( const in vec3 normal ) {\n\treturn normalize( normal ) * 0.5 + 0.5;\n}\nvec3 unpackRGBToNormal( const in vec3 rgb ) {\n\treturn 2.0 * rgb.xyz - 1.0;\n}\nconst float PackUpscale = 256. / 255.;const float UnpackDownscale = 255. / 256.;const float ShiftRight8 = 1. / 256.;\nconst float Inv255 = 1. / 255.;\nconst vec4 PackFactors = vec4( 1.0, 256.0, 256.0 * 256.0, 256.0 * 256.0 * 256.0 );\nconst vec2 UnpackFactors2 = vec2( UnpackDownscale, 1.0 / PackFactors.g );\nconst vec3 UnpackFactors3 = vec3( UnpackDownscale / PackFactors.rg, 1.0 / PackFactors.b );\nconst vec4 UnpackFactors4 = vec4( UnpackDownscale / PackFactors.rgb, 1.0 / PackFactors.a );\nvec4 packDepthToRGBA( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec4( 0., 0., 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec4( 1., 1., 1., 1. );\n\tfloat vuf;\n\tfloat af = modf( v * PackFactors.a, vuf );\n\tfloat bf = modf( vuf * ShiftRight8, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec4( vuf * Inv255, gf * PackUpscale, bf * PackUpscale, af );\n}\nvec3 packDepthToRGB( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec3( 0., 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec3( 1., 1., 1. );\n\tfloat vuf;\n\tfloat bf = modf( v * PackFactors.b, vuf );\n\tfloat gf = modf( vuf * ShiftRight8, vuf );\n\treturn vec3( vuf * Inv255, gf * PackUpscale, bf );\n}\nvec2 packDepthToRG( const in float v ) {\n\tif( v <= 0.0 )\n\t\treturn vec2( 0., 0. );\n\tif( v >= 1.0 )\n\t\treturn vec2( 1., 1. 
);\n\tfloat vuf;\n\tfloat gf = modf( v * 256., vuf );\n\treturn vec2( vuf * Inv255, gf );\n}\nfloat unpackRGBAToDepth( const in vec4 v ) {\n\treturn dot( v, UnpackFactors4 );\n}\nfloat unpackRGBToDepth( const in vec3 v ) {\n\treturn dot( v, UnpackFactors3 );\n}\nfloat unpackRGToDepth( const in vec2 v ) {\n\treturn v.r * UnpackFactors2.r + v.g * UnpackFactors2.g;\n}\nvec4 pack2HalfToRGBA( const in vec2 v ) {\n\tvec4 r = vec4( v.x, fract( v.x * 255.0 ), v.y, fract( v.y * 255.0 ) );\n\treturn vec4( r.x - r.y / 255.0, r.y, r.z - r.w / 255.0, r.w );\n}\nvec2 unpackRGBATo2Half( const in vec4 v ) {\n\treturn vec2( v.x + ( v.y / 255.0 ), v.z + ( v.w / 255.0 ) );\n}\nfloat viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( viewZ + near ) / ( near - far );\n}\nfloat orthographicDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\treturn depth * ( near - far ) - near;\n}\nfloat viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( ( near + viewZ ) * far ) / ( ( far - near ) * viewZ );\n}\nfloat perspectiveDepthToViewZ( const in float depth, const in float near, const in float far ) {\n\treturn ( near * far ) / ( ( far - near ) * depth - far );\n}"; var premultiplied_alpha_fragment = "#ifdef PREMULTIPLIED_ALPHA\n\tgl_FragColor.rgb *= gl_FragColor.a;\n#endif"; var project_vertex = "vec4 mvPosition = vec4( transformed, 1.0 );\n#ifdef USE_BATCHING\n\tmvPosition = batchingMatrix * mvPosition;\n#endif\n#ifdef USE_INSTANCING\n\tmvPosition = instanceMatrix * mvPosition;\n#endif\nmvPosition = modelViewMatrix * mvPosition;\ngl_Position = projectionMatrix * mvPosition;"; var dithering_fragment = "#ifdef DITHERING\n\tgl_FragColor.rgb = dithering( gl_FragColor.rgb );\n#endif"; var dithering_pars_fragment = "#ifdef DITHERING\n\tvec3 dithering( vec3 color ) {\n\t\tfloat grid_position = rand( gl_FragCoord.xy );\n\t\tvec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );\n\t\tdither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );\n\t\treturn color + dither_shift_RGB;\n\t}\n#endif"; var roughnessmap_fragment = "float roughnessFactor = roughness;\n#ifdef USE_ROUGHNESSMAP\n\tvec4 texelRoughness = texture2D( roughnessMap, vRoughnessMapUv );\n\troughnessFactor *= texelRoughness.g;\n#endif"; var roughnessmap_pars_fragment = "#ifdef USE_ROUGHNESSMAP\n\tuniform sampler2D roughnessMap;\n#endif"; var shadowmap_pars_fragment = "#if NUM_SPOT_LIGHT_COORDS > 0\n\tvarying vec4 vSpotLightCoord[ NUM_SPOT_LIGHT_COORDS ];\n#endif\n#if NUM_SPOT_LIGHT_MAPS > 0\n\tuniform sampler2D spotLightMap[ NUM_SPOT_LIGHT_MAPS ];\n#endif\n#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if 
NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n\tfloat texture2DCompare( sampler2D depths, vec2 uv, float compare ) {\n\t\treturn step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );\n\t}\n\tvec2 texture2DDistribution( sampler2D shadow, vec2 uv ) {\n\t\treturn unpackRGBATo2Half( texture2D( shadow, uv ) );\n\t}\n\tfloat VSMShadow (sampler2D shadow, vec2 uv, float compare ){\n\t\tfloat occlusion = 1.0;\n\t\tvec2 distribution = texture2DDistribution( shadow, uv );\n\t\tfloat hard_shadow = step( compare , distribution.x );\n\t\tif (hard_shadow != 1.0 ) {\n\t\t\tfloat distance = compare - distribution.x ;\n\t\t\tfloat variance = max( 0.00000, distribution.y * distribution.y );\n\t\t\tfloat softness_probability = variance / (variance + distance * distance );\t\t\tsoftness_probability = clamp( ( softness_probability - 0.3 ) / ( 0.95 - 0.3 ), 0.0, 1.0 );\t\t\tocclusion = clamp( max( hard_shadow, softness_probability ), 0.0, 1.0 );\n\t\t}\n\t\treturn occlusion;\n\t}\n\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\tfloat shadow = 1.0;\n\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\tshadowCoord.z += shadowBias;\n\t\tbool inFrustum = shadowCoord.x >= 0.0 && shadowCoord.x <= 1.0 && shadowCoord.y >= 0.0 && shadowCoord.y <= 1.0;\n\t\tbool frustumTest = inFrustum && shadowCoord.z <= 1.0;\n\t\tif ( frustumTest ) {\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx0 = - texelSize.x * shadowRadius;\n\t\t\tfloat dy0 = - texelSize.y * shadowRadius;\n\t\t\tfloat dx1 = + texelSize.x * shadowRadius;\n\t\t\tfloat dy1 = + texelSize.y * shadowRadius;\n\t\t\tfloat dx2 = dx0 / 2.0;\n\t\t\tfloat dy2 = dy0 / 2.0;\n\t\t\tfloat dx3 = dx1 / 2.0;\n\t\t\tfloat dy3 = dy1 / 2.0;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy3 ), shadowCoord.z ) 
+\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )\n\t\t\t) * ( 1.0 / 17.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_PCF_SOFT )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx = texelSize.x;\n\t\t\tfloat dy = texelSize.y;\n\t\t\tvec2 uv = shadowCoord.xy;\n\t\t\tvec2 f = fract( uv * shadowMapSize + 0.5 );\n\t\t\tuv -= f * texelSize;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, uv, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( dx, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 0.0, dy ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + texelSize, shadowCoord.z ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, dy ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( 0.0, -dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 0.0, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( mix( texture2DCompare( shadowMap, uv + vec2( -dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t mix( texture2DCompare( shadowMap, uv + vec2( -dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t f.x ),\n\t\t\t\t\t f.y )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_VSM )\n\t\t\tshadow = VSMShadow( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#else\n\t\t\tshadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#endif\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n\tvec2 cubeToUV( vec3 v, float texelSizeY ) {\n\t\tvec3 absV = abs( v );\n\t\tfloat scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );\n\t\tabsV *= scaleToCube;\n\t\tv *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );\n\t\tvec2 planar = v.xy;\n\t\tfloat almostATexel = 1.5 * texelSizeY;\n\t\tfloat almostOne = 1.0 - almostATexel;\n\t\tif ( absV.z >= almostOne ) {\n\t\t\tif ( v.z > 0.0 )\n\t\t\t\tplanar.x = 4.0 - v.x;\n\t\t} else if ( absV.x >= almostOne ) {\n\t\t\tfloat signX = sign( v.x );\n\t\t\tplanar.x = v.z * signX + 2.0 * signX;\n\t\t} else if ( absV.y >= almostOne ) {\n\t\t\tfloat signY = sign( v.y );\n\t\t\tplanar.x = v.x + 2.0 * signY + 2.0;\n\t\t\tplanar.y = v.z * signY - 2.0;\n\t\t}\n\t\treturn vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );\n\t}\n\tfloat getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowIntensity, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tfloat shadow = 1.0;\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\t\n\t\tfloat lightToPositionLength = length( lightToPosition );\n\t\tif ( lightToPositionLength - shadowCameraFar <= 0.0 && lightToPositionLength - 
shadowCameraNear >= 0.0 ) {\n\t\t\tfloat dp = ( lightToPositionLength - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear );\t\t\tdp += shadowBias;\n\t\t\tvec3 bd3D = normalize( lightToPosition );\n\t\t\tvec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );\n\t\t\t#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT ) || defined( SHADOWMAP_TYPE_VSM )\n\t\t\t\tvec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;\n\t\t\t\tshadow = (\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +\n\t\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )\n\t\t\t\t) * ( 1.0 / 9.0 );\n\t\t\t#else\n\t\t\t\tshadow = texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );\n\t\t\t#endif\n\t\t}\n\t\treturn mix( 1.0, shadow, shadowIntensity );\n\t}\n#endif"; var shadowmap_pars_vertex = "#if NUM_SPOT_LIGHT_COORDS > 0\n\tuniform mat4 spotLightMatrix[ NUM_SPOT_LIGHT_COORDS ];\n\tvarying vec4 vSpotLightCoord[ NUM_SPOT_LIGHT_COORDS ];\n#endif\n#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 pointShadowMatrix[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowIntensity;\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n#endif"; var shadowmap_vertex = "#if ( defined( USE_SHADOWMAP ) && ( NUM_DIR_LIGHT_SHADOWS > 0 || NUM_POINT_LIGHT_SHADOWS > 0 ) ) || ( NUM_SPOT_LIGHT_COORDS > 0 )\n\tvec3 shadowWorldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\tvec4 shadowWorldPosition;\n#endif\n#if defined( USE_SHADOWMAP )\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * directionalLightShadows[ i ].shadowNormalBias, 0 );\n\t\t\tvDirectionalShadowCoord[ i ] = 
directionalShadowMatrix[ i ] * shadowWorldPosition;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * pointLightShadows[ i ].shadowNormalBias, 0 );\n\t\t\tvPointShadowCoord[ i ] = pointShadowMatrix[ i ] * shadowWorldPosition;\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if NUM_SPOT_LIGHT_COORDS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_COORDS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition;\n\t\t#if ( defined( USE_SHADOWMAP ) && UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\t\tshadowWorldPosition.xyz += shadowWorldNormal * spotLightShadows[ i ].shadowNormalBias;\n\t\t#endif\n\t\tvSpotLightCoord[ i ] = spotLightMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n#endif"; var shadowmask_pars_fragment = "float getShadowMask() {\n\tfloat shadow = 1.0;\n\t#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tdirectionalLight = directionalLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowIntensity, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tspotLight = spotLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowIntensity, spotLight.shadowBias, spotLight.shadowRadius, vSpotLightCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tpointLight = pointLightShadows[ i ];\n\t\tshadow *= receiveShadow ? 
getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowIntensity, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#endif\n\treturn shadow;\n}"; var skinbase_vertex = "#ifdef USE_SKINNING\n\tmat4 boneMatX = getBoneMatrix( skinIndex.x );\n\tmat4 boneMatY = getBoneMatrix( skinIndex.y );\n\tmat4 boneMatZ = getBoneMatrix( skinIndex.z );\n\tmat4 boneMatW = getBoneMatrix( skinIndex.w );\n#endif"; var skinning_pars_vertex = "#ifdef USE_SKINNING\n\tuniform mat4 bindMatrix;\n\tuniform mat4 bindMatrixInverse;\n\tuniform highp sampler2D boneTexture;\n\tmat4 getBoneMatrix( const in float i ) {\n\t\tint size = textureSize( boneTexture, 0 ).x;\n\t\tint j = int( i ) * 4;\n\t\tint x = j % size;\n\t\tint y = j / size;\n\t\tvec4 v1 = texelFetch( boneTexture, ivec2( x, y ), 0 );\n\t\tvec4 v2 = texelFetch( boneTexture, ivec2( x + 1, y ), 0 );\n\t\tvec4 v3 = texelFetch( boneTexture, ivec2( x + 2, y ), 0 );\n\t\tvec4 v4 = texelFetch( boneTexture, ivec2( x + 3, y ), 0 );\n\t\treturn mat4( v1, v2, v3, v4 );\n\t}\n#endif"; var skinning_vertex = "#ifdef USE_SKINNING\n\tvec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );\n\tvec4 skinned = vec4( 0.0 );\n\tskinned += boneMatX * skinVertex * skinWeight.x;\n\tskinned += boneMatY * skinVertex * skinWeight.y;\n\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\tskinned += boneMatW * skinVertex * skinWeight.w;\n\ttransformed = ( bindMatrixInverse * skinned ).xyz;\n#endif"; var skinnormal_vertex = "#ifdef USE_SKINNING\n\tmat4 skinMatrix = mat4( 0.0 );\n\tskinMatrix += skinWeight.x * boneMatX;\n\tskinMatrix += skinWeight.y * boneMatY;\n\tskinMatrix += skinWeight.z * boneMatZ;\n\tskinMatrix += skinWeight.w * boneMatW;\n\tskinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;\n\tobjectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;\n\t#ifdef USE_TANGENT\n\t\tobjectTangent = vec4( skinMatrix * vec4( objectTangent, 0.0 ) ).xyz;\n\t#endif\n#endif"; var specularmap_fragment = "float specularStrength;\n#ifdef USE_SPECULARMAP\n\tvec4 texelSpecular = texture2D( specularMap, vSpecularMapUv );\n\tspecularStrength = texelSpecular.r;\n#else\n\tspecularStrength = 1.0;\n#endif"; var specularmap_pars_fragment = "#ifdef USE_SPECULARMAP\n\tuniform sampler2D specularMap;\n#endif"; var tonemapping_fragment = "#if defined( TONE_MAPPING )\n\tgl_FragColor.rgb = toneMapping( gl_FragColor.rgb );\n#endif"; var tonemapping_pars_fragment = "#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\nuniform float toneMappingExposure;\nvec3 LinearToneMapping( vec3 color ) {\n\treturn saturate( toneMappingExposure * color );\n}\nvec3 ReinhardToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\treturn saturate( color / ( vec3( 1.0 ) + color ) );\n}\nvec3 CineonToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\tcolor = max( vec3( 0.0 ), color - 0.004 );\n\treturn pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );\n}\nvec3 RRTAndODTFit( vec3 v ) {\n\tvec3 a = v * ( v + 0.0245786 ) - 0.000090537;\n\tvec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;\n\treturn a / b;\n}\nvec3 ACESFilmicToneMapping( vec3 color ) {\n\tconst mat3 ACESInputMat = mat3(\n\t\tvec3( 0.59719, 0.07600, 0.02840 ),\t\tvec3( 0.35458, 0.90834, 0.13383 ),\n\t\tvec3( 0.04823, 0.01566, 0.83777 )\n\t);\n\tconst mat3 ACESOutputMat = mat3(\n\t\tvec3( 1.60475, -0.10208, -0.00327 
),\t\tvec3( -0.53108, 1.10813, -0.07276 ),\n\t\tvec3( -0.07367, -0.00605, 1.07602 )\n\t);\n\tcolor *= toneMappingExposure / 0.6;\n\tcolor = ACESInputMat * color;\n\tcolor = RRTAndODTFit( color );\n\tcolor = ACESOutputMat * color;\n\treturn saturate( color );\n}\nconst mat3 LINEAR_REC2020_TO_LINEAR_SRGB = mat3(\n\tvec3( 1.6605, - 0.1246, - 0.0182 ),\n\tvec3( - 0.5876, 1.1329, - 0.1006 ),\n\tvec3( - 0.0728, - 0.0083, 1.1187 )\n);\nconst mat3 LINEAR_SRGB_TO_LINEAR_REC2020 = mat3(\n\tvec3( 0.6274, 0.0691, 0.0164 ),\n\tvec3( 0.3293, 0.9195, 0.0880 ),\n\tvec3( 0.0433, 0.0113, 0.8956 )\n);\nvec3 agxDefaultContrastApprox( vec3 x ) {\n\tvec3 x2 = x * x;\n\tvec3 x4 = x2 * x2;\n\treturn + 15.5 * x4 * x2\n\t\t- 40.14 * x4 * x\n\t\t+ 31.96 * x4\n\t\t- 6.868 * x2 * x\n\t\t+ 0.4298 * x2\n\t\t+ 0.1191 * x\n\t\t- 0.00232;\n}\nvec3 AgXToneMapping( vec3 color ) {\n\tconst mat3 AgXInsetMatrix = mat3(\n\t\tvec3( 0.856627153315983, 0.137318972929847, 0.11189821299995 ),\n\t\tvec3( 0.0951212405381588, 0.761241990602591, 0.0767994186031903 ),\n\t\tvec3( 0.0482516061458583, 0.101439036467562, 0.811302368396859 )\n\t);\n\tconst mat3 AgXOutsetMatrix = mat3(\n\t\tvec3( 1.1271005818144368, - 0.1413297634984383, - 0.14132976349843826 ),\n\t\tvec3( - 0.11060664309660323, 1.157823702216272, - 0.11060664309660294 ),\n\t\tvec3( - 0.016493938717834573, - 0.016493938717834257, 1.2519364065950405 )\n\t);\n\tconst float AgxMinEv = - 12.47393;\tconst float AgxMaxEv = 4.026069;\n\tcolor *= toneMappingExposure;\n\tcolor = LINEAR_SRGB_TO_LINEAR_REC2020 * color;\n\tcolor = AgXInsetMatrix * color;\n\tcolor = max( color, 1e-10 );\tcolor = log2( color );\n\tcolor = ( color - AgxMinEv ) / ( AgxMaxEv - AgxMinEv );\n\tcolor = clamp( color, 0.0, 1.0 );\n\tcolor = agxDefaultContrastApprox( color );\n\tcolor = AgXOutsetMatrix * color;\n\tcolor = pow( max( vec3( 0.0 ), color ), vec3( 2.2 ) );\n\tcolor = LINEAR_REC2020_TO_LINEAR_SRGB * color;\n\tcolor = clamp( color, 0.0, 1.0 );\n\treturn color;\n}\nvec3 NeutralToneMapping( vec3 color ) {\n\tconst float StartCompression = 0.8 - 0.04;\n\tconst float Desaturation = 0.15;\n\tcolor *= toneMappingExposure;\n\tfloat x = min( color.r, min( color.g, color.b ) );\n\tfloat offset = x < 0.08 ? x - 6.25 * x * x : 0.04;\n\tcolor -= offset;\n\tfloat peak = max( color.r, max( color.g, color.b ) );\n\tif ( peak < StartCompression ) return color;\n\tfloat d = 1. - StartCompression;\n\tfloat newPeak = 1. - d * d / ( peak + d - StartCompression );\n\tcolor *= newPeak / peak;\n\tfloat g = 1. - 1. / ( Desaturation * ( peak - newPeak ) + 1. 
);\n\treturn mix( color, vec3( newPeak ), g );\n}\nvec3 CustomToneMapping( vec3 color ) { return color; }"; var transmission_fragment = "#ifdef USE_TRANSMISSION\n\tmaterial.transmission = transmission;\n\tmaterial.transmissionAlpha = 1.0;\n\tmaterial.thickness = thickness;\n\tmaterial.attenuationDistance = attenuationDistance;\n\tmaterial.attenuationColor = attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tmaterial.transmission *= texture2D( transmissionMap, vTransmissionMapUv ).r;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tmaterial.thickness *= texture2D( thicknessMap, vThicknessMapUv ).g;\n\t#endif\n\tvec3 pos = vWorldPosition;\n\tvec3 v = normalize( cameraPosition - pos );\n\tvec3 n = inverseTransformDirection( normal, viewMatrix );\n\tvec4 transmitted = getIBLVolumeRefraction(\n\t\tn, v, material.roughness, material.diffuseColor, material.specularColor, material.specularF90,\n\t\tpos, modelMatrix, viewMatrix, projectionMatrix, material.dispersion, material.ior, material.thickness,\n\t\tmaterial.attenuationColor, material.attenuationDistance );\n\tmaterial.transmissionAlpha = mix( material.transmissionAlpha, transmitted.a, material.transmission );\n\ttotalDiffuse = mix( totalDiffuse, transmitted.rgb, material.transmission );\n#endif"; var transmission_pars_fragment = "#ifdef USE_TRANSMISSION\n\tuniform float transmission;\n\tuniform float thickness;\n\tuniform float attenuationDistance;\n\tuniform vec3 attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tuniform sampler2D transmissionMap;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tuniform sampler2D thicknessMap;\n\t#endif\n\tuniform vec2 transmissionSamplerSize;\n\tuniform sampler2D transmissionSamplerMap;\n\tuniform mat4 modelMatrix;\n\tuniform mat4 projectionMatrix;\n\tvarying vec3 vWorldPosition;\n\tfloat w0( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * ( a * ( - a + 3.0 ) - 3.0 ) + 1.0 );\n\t}\n\tfloat w1( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * a * ( 3.0 * a - 6.0 ) + 4.0 );\n\t}\n\tfloat w2( float a ){\n\t\treturn ( 1.0 / 6.0 ) * ( a * ( a * ( - 3.0 * a + 3.0 ) + 3.0 ) + 1.0 );\n\t}\n\tfloat w3( float a ) {\n\t\treturn ( 1.0 / 6.0 ) * ( a * a * a );\n\t}\n\tfloat g0( float a ) {\n\t\treturn w0( a ) + w1( a );\n\t}\n\tfloat g1( float a ) {\n\t\treturn w2( a ) + w3( a );\n\t}\n\tfloat h0( float a ) {\n\t\treturn - 1.0 + w1( a ) / ( w0( a ) + w1( a ) );\n\t}\n\tfloat h1( float a ) {\n\t\treturn 1.0 + w3( a ) / ( w2( a ) + w3( a ) );\n\t}\n\tvec4 bicubic( sampler2D tex, vec2 uv, vec4 texelSize, float lod ) {\n\t\tuv = uv * texelSize.zw + 0.5;\n\t\tvec2 iuv = floor( uv );\n\t\tvec2 fuv = fract( uv );\n\t\tfloat g0x = g0( fuv.x );\n\t\tfloat g1x = g1( fuv.x );\n\t\tfloat h0x = h0( fuv.x );\n\t\tfloat h1x = h1( fuv.x );\n\t\tfloat h0y = h0( fuv.y );\n\t\tfloat h1y = h1( fuv.y );\n\t\tvec2 p0 = ( vec2( iuv.x + h0x, iuv.y + h0y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p1 = ( vec2( iuv.x + h1x, iuv.y + h0y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p2 = ( vec2( iuv.x + h0x, iuv.y + h1y ) - 0.5 ) * texelSize.xy;\n\t\tvec2 p3 = ( vec2( iuv.x + h1x, iuv.y + h1y ) - 0.5 ) * texelSize.xy;\n\t\treturn g0( fuv.y ) * ( g0x * textureLod( tex, p0, lod ) + g1x * textureLod( tex, p1, lod ) ) +\n\t\t\tg1( fuv.y ) * ( g0x * textureLod( tex, p2, lod ) + g1x * textureLod( tex, p3, lod ) );\n\t}\n\tvec4 textureBicubic( sampler2D sampler, vec2 uv, float lod ) {\n\t\tvec2 fLodSize = vec2( textureSize( sampler, int( lod ) ) );\n\t\tvec2 cLodSize = vec2( textureSize( sampler, int( lod + 1.0 ) ) );\n\t\tvec2 fLodSizeInv = 1.0 / fLodSize;\n\t\tvec2 cLodSizeInv = 1.0 
/ cLodSize;\n\t\tvec4 fSample = bicubic( sampler, uv, vec4( fLodSizeInv, fLodSize ), floor( lod ) );\n\t\tvec4 cSample = bicubic( sampler, uv, vec4( cLodSizeInv, cLodSize ), ceil( lod ) );\n\t\treturn mix( fSample, cSample, fract( lod ) );\n\t}\n\tvec3 getVolumeTransmissionRay( const in vec3 n, const in vec3 v, const in float thickness, const in float ior, const in mat4 modelMatrix ) {\n\t\tvec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n\t\tvec3 modelScale;\n\t\tmodelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) );\n\t\tmodelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n\t\tmodelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) );\n\t\treturn normalize( refractionVector ) * thickness * modelScale;\n\t}\n\tfloat applyIorToRoughness( const in float roughness, const in float ior ) {\n\t\treturn roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n\t}\n\tvec4 getTransmissionSample( const in vec2 fragCoord, const in float roughness, const in float ior ) {\n\t\tfloat lod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior );\n\t\treturn textureBicubic( transmissionSamplerMap, fragCoord.xy, lod );\n\t}\n\tvec3 volumeAttenuation( const in float transmissionDistance, const in vec3 attenuationColor, const in float attenuationDistance ) {\n\t\tif ( isinf( attenuationDistance ) ) {\n\t\t\treturn vec3( 1.0 );\n\t\t} else {\n\t\t\tvec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n\t\t\tvec3 transmittance = exp( - attenuationCoefficient * transmissionDistance );\t\t\treturn transmittance;\n\t\t}\n\t}\n\tvec4 getIBLVolumeRefraction( const in vec3 n, const in vec3 v, const in float roughness, const in vec3 diffuseColor,\n\t\tconst in vec3 specularColor, const in float specularF90, const in vec3 position, const in mat4 modelMatrix,\n\t\tconst in mat4 viewMatrix, const in mat4 projMatrix, const in float dispersion, const in float ior, const in float thickness,\n\t\tconst in vec3 attenuationColor, const in float attenuationDistance ) {\n\t\tvec4 transmittedLight;\n\t\tvec3 transmittance;\n\t\t#ifdef USE_DISPERSION\n\t\t\tfloat halfSpread = ( ior - 1.0 ) * 0.025 * dispersion;\n\t\t\tvec3 iors = vec3( ior - halfSpread, ior, ior + halfSpread );\n\t\t\tfor ( int i = 0; i < 3; i ++ ) {\n\t\t\t\tvec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, iors[ i ], modelMatrix );\n\t\t\t\tvec3 refractedRayExit = position + transmissionRay;\n\t\t\t\tvec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n\t\t\t\tvec2 refractionCoords = ndcPos.xy / ndcPos.w;\n\t\t\t\trefractionCoords += 1.0;\n\t\t\t\trefractionCoords /= 2.0;\n\t\t\t\tvec4 transmissionSample = getTransmissionSample( refractionCoords, roughness, iors[ i ] );\n\t\t\t\ttransmittedLight[ i ] = transmissionSample[ i ];\n\t\t\t\ttransmittedLight.a += transmissionSample.a;\n\t\t\t\ttransmittance[ i ] = diffuseColor[ i ] * volumeAttenuation( length( transmissionRay ), attenuationColor, attenuationDistance )[ i ];\n\t\t\t}\n\t\t\ttransmittedLight.a /= 3.0;\n\t\t#else\n\t\t\tvec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n\t\t\tvec3 refractedRayExit = position + transmissionRay;\n\t\t\tvec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n\t\t\tvec2 refractionCoords = ndcPos.xy / ndcPos.w;\n\t\t\trefractionCoords += 1.0;\n\t\t\trefractionCoords /= 2.0;\n\t\t\ttransmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n\t\t\ttransmittance = diffuseColor * volumeAttenuation( length( 
transmissionRay ), attenuationColor, attenuationDistance );\n\t\t#endif\n\t\tvec3 attenuatedColor = transmittance * transmittedLight.rgb;\n\t\tvec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n\t\tfloat transmittanceFactor = ( transmittance.r + transmittance.g + transmittance.b ) / 3.0;\n\t\treturn vec4( ( 1.0 - F ) * attenuatedColor, 1.0 - ( 1.0 - transmittedLight.a ) * transmittanceFactor );\n\t}\n#endif"; var uv_pars_fragment = "#if defined( USE_UV ) || defined( USE_ANISOTROPY )\n\tvarying vec2 vUv;\n#endif\n#ifdef USE_MAP\n\tvarying vec2 vMapUv;\n#endif\n#ifdef USE_ALPHAMAP\n\tvarying vec2 vAlphaMapUv;\n#endif\n#ifdef USE_LIGHTMAP\n\tvarying vec2 vLightMapUv;\n#endif\n#ifdef USE_AOMAP\n\tvarying vec2 vAoMapUv;\n#endif\n#ifdef USE_BUMPMAP\n\tvarying vec2 vBumpMapUv;\n#endif\n#ifdef USE_NORMALMAP\n\tvarying vec2 vNormalMapUv;\n#endif\n#ifdef USE_EMISSIVEMAP\n\tvarying vec2 vEmissiveMapUv;\n#endif\n#ifdef USE_METALNESSMAP\n\tvarying vec2 vMetalnessMapUv;\n#endif\n#ifdef USE_ROUGHNESSMAP\n\tvarying vec2 vRoughnessMapUv;\n#endif\n#ifdef USE_ANISOTROPYMAP\n\tvarying vec2 vAnisotropyMapUv;\n#endif\n#ifdef USE_CLEARCOATMAP\n\tvarying vec2 vClearcoatMapUv;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tvarying vec2 vClearcoatNormalMapUv;\n#endif\n#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\tvarying vec2 vClearcoatRoughnessMapUv;\n#endif\n#ifdef USE_IRIDESCENCEMAP\n\tvarying vec2 vIridescenceMapUv;\n#endif\n#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\tvarying vec2 vIridescenceThicknessMapUv;\n#endif\n#ifdef USE_SHEEN_COLORMAP\n\tvarying vec2 vSheenColorMapUv;\n#endif\n#ifdef USE_SHEEN_ROUGHNESSMAP\n\tvarying vec2 vSheenRoughnessMapUv;\n#endif\n#ifdef USE_SPECULARMAP\n\tvarying vec2 vSpecularMapUv;\n#endif\n#ifdef USE_SPECULAR_COLORMAP\n\tvarying vec2 vSpecularColorMapUv;\n#endif\n#ifdef USE_SPECULAR_INTENSITYMAP\n\tvarying vec2 vSpecularIntensityMapUv;\n#endif\n#ifdef USE_TRANSMISSIONMAP\n\tuniform mat3 transmissionMapTransform;\n\tvarying vec2 vTransmissionMapUv;\n#endif\n#ifdef USE_THICKNESSMAP\n\tuniform mat3 thicknessMapTransform;\n\tvarying vec2 vThicknessMapUv;\n#endif"; var uv_pars_vertex = "#if defined( USE_UV ) || defined( USE_ANISOTROPY )\n\tvarying vec2 vUv;\n#endif\n#ifdef USE_MAP\n\tuniform mat3 mapTransform;\n\tvarying vec2 vMapUv;\n#endif\n#ifdef USE_ALPHAMAP\n\tuniform mat3 alphaMapTransform;\n\tvarying vec2 vAlphaMapUv;\n#endif\n#ifdef USE_LIGHTMAP\n\tuniform mat3 lightMapTransform;\n\tvarying vec2 vLightMapUv;\n#endif\n#ifdef USE_AOMAP\n\tuniform mat3 aoMapTransform;\n\tvarying vec2 vAoMapUv;\n#endif\n#ifdef USE_BUMPMAP\n\tuniform mat3 bumpMapTransform;\n\tvarying vec2 vBumpMapUv;\n#endif\n#ifdef USE_NORMALMAP\n\tuniform mat3 normalMapTransform;\n\tvarying vec2 vNormalMapUv;\n#endif\n#ifdef USE_DISPLACEMENTMAP\n\tuniform mat3 displacementMapTransform;\n\tvarying vec2 vDisplacementMapUv;\n#endif\n#ifdef USE_EMISSIVEMAP\n\tuniform mat3 emissiveMapTransform;\n\tvarying vec2 vEmissiveMapUv;\n#endif\n#ifdef USE_METALNESSMAP\n\tuniform mat3 metalnessMapTransform;\n\tvarying vec2 vMetalnessMapUv;\n#endif\n#ifdef USE_ROUGHNESSMAP\n\tuniform mat3 roughnessMapTransform;\n\tvarying vec2 vRoughnessMapUv;\n#endif\n#ifdef USE_ANISOTROPYMAP\n\tuniform mat3 anisotropyMapTransform;\n\tvarying vec2 vAnisotropyMapUv;\n#endif\n#ifdef USE_CLEARCOATMAP\n\tuniform mat3 clearcoatMapTransform;\n\tvarying vec2 vClearcoatMapUv;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tuniform mat3 clearcoatNormalMapTransform;\n\tvarying vec2 vClearcoatNormalMapUv;\n#endif\n#ifdef 
USE_CLEARCOAT_ROUGHNESSMAP\n\tuniform mat3 clearcoatRoughnessMapTransform;\n\tvarying vec2 vClearcoatRoughnessMapUv;\n#endif\n#ifdef USE_SHEEN_COLORMAP\n\tuniform mat3 sheenColorMapTransform;\n\tvarying vec2 vSheenColorMapUv;\n#endif\n#ifdef USE_SHEEN_ROUGHNESSMAP\n\tuniform mat3 sheenRoughnessMapTransform;\n\tvarying vec2 vSheenRoughnessMapUv;\n#endif\n#ifdef USE_IRIDESCENCEMAP\n\tuniform mat3 iridescenceMapTransform;\n\tvarying vec2 vIridescenceMapUv;\n#endif\n#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\tuniform mat3 iridescenceThicknessMapTransform;\n\tvarying vec2 vIridescenceThicknessMapUv;\n#endif\n#ifdef USE_SPECULARMAP\n\tuniform mat3 specularMapTransform;\n\tvarying vec2 vSpecularMapUv;\n#endif\n#ifdef USE_SPECULAR_COLORMAP\n\tuniform mat3 specularColorMapTransform;\n\tvarying vec2 vSpecularColorMapUv;\n#endif\n#ifdef USE_SPECULAR_INTENSITYMAP\n\tuniform mat3 specularIntensityMapTransform;\n\tvarying vec2 vSpecularIntensityMapUv;\n#endif\n#ifdef USE_TRANSMISSIONMAP\n\tuniform mat3 transmissionMapTransform;\n\tvarying vec2 vTransmissionMapUv;\n#endif\n#ifdef USE_THICKNESSMAP\n\tuniform mat3 thicknessMapTransform;\n\tvarying vec2 vThicknessMapUv;\n#endif"; var uv_vertex = "#if defined( USE_UV ) || defined( USE_ANISOTROPY )\n\tvUv = vec3( uv, 1 ).xy;\n#endif\n#ifdef USE_MAP\n\tvMapUv = ( mapTransform * vec3( MAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_ALPHAMAP\n\tvAlphaMapUv = ( alphaMapTransform * vec3( ALPHAMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_LIGHTMAP\n\tvLightMapUv = ( lightMapTransform * vec3( LIGHTMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_AOMAP\n\tvAoMapUv = ( aoMapTransform * vec3( AOMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_BUMPMAP\n\tvBumpMapUv = ( bumpMapTransform * vec3( BUMPMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_NORMALMAP\n\tvNormalMapUv = ( normalMapTransform * vec3( NORMALMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_DISPLACEMENTMAP\n\tvDisplacementMapUv = ( displacementMapTransform * vec3( DISPLACEMENTMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_EMISSIVEMAP\n\tvEmissiveMapUv = ( emissiveMapTransform * vec3( EMISSIVEMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_METALNESSMAP\n\tvMetalnessMapUv = ( metalnessMapTransform * vec3( METALNESSMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_ROUGHNESSMAP\n\tvRoughnessMapUv = ( roughnessMapTransform * vec3( ROUGHNESSMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_ANISOTROPYMAP\n\tvAnisotropyMapUv = ( anisotropyMapTransform * vec3( ANISOTROPYMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_CLEARCOATMAP\n\tvClearcoatMapUv = ( clearcoatMapTransform * vec3( CLEARCOATMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tvClearcoatNormalMapUv = ( clearcoatNormalMapTransform * vec3( CLEARCOAT_NORMALMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\tvClearcoatRoughnessMapUv = ( clearcoatRoughnessMapTransform * vec3( CLEARCOAT_ROUGHNESSMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_IRIDESCENCEMAP\n\tvIridescenceMapUv = ( iridescenceMapTransform * vec3( IRIDESCENCEMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_IRIDESCENCE_THICKNESSMAP\n\tvIridescenceThicknessMapUv = ( iridescenceThicknessMapTransform * vec3( IRIDESCENCE_THICKNESSMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_SHEEN_COLORMAP\n\tvSheenColorMapUv = ( sheenColorMapTransform * vec3( SHEEN_COLORMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_SHEEN_ROUGHNESSMAP\n\tvSheenRoughnessMapUv = ( sheenRoughnessMapTransform * vec3( SHEEN_ROUGHNESSMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_SPECULARMAP\n\tvSpecularMapUv = ( specularMapTransform * vec3( SPECULARMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_SPECULAR_COLORMAP\n\tvSpecularColorMapUv = ( specularColorMapTransform * vec3( 
SPECULAR_COLORMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_SPECULAR_INTENSITYMAP\n\tvSpecularIntensityMapUv = ( specularIntensityMapTransform * vec3( SPECULAR_INTENSITYMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_TRANSMISSIONMAP\n\tvTransmissionMapUv = ( transmissionMapTransform * vec3( TRANSMISSIONMAP_UV, 1 ) ).xy;\n#endif\n#ifdef USE_THICKNESSMAP\n\tvThicknessMapUv = ( thicknessMapTransform * vec3( THICKNESSMAP_UV, 1 ) ).xy;\n#endif"; var worldpos_vertex = "#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP ) || defined ( USE_TRANSMISSION ) || NUM_SPOT_LIGHT_COORDS > 0\n\tvec4 worldPosition = vec4( transformed, 1.0 );\n\t#ifdef USE_BATCHING\n\t\tworldPosition = batchingMatrix * worldPosition;\n\t#endif\n\t#ifdef USE_INSTANCING\n\t\tworldPosition = instanceMatrix * worldPosition;\n\t#endif\n\tworldPosition = modelMatrix * worldPosition;\n#endif"; const vertex$h = "varying vec2 vUv;\nuniform mat3 uvTransform;\nvoid main() {\n\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n\tgl_Position = vec4( position.xy, 1.0, 1.0 );\n}"; const fragment$h = "uniform sampler2D t2D;\nuniform float backgroundIntensity;\nvarying vec2 vUv;\nvoid main() {\n\tvec4 texColor = texture2D( t2D, vUv );\n\t#ifdef DECODE_VIDEO_TEXTURE\n\t\ttexColor = vec4( mix( pow( texColor.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), texColor.rgb * 0.0773993808, vec3( lessThanEqual( texColor.rgb, vec3( 0.04045 ) ) ) ), texColor.w );\n\t#endif\n\ttexColor.rgb *= backgroundIntensity;\n\tgl_FragColor = texColor;\n\t#include \n\t#include \n}"; const vertex$g = "varying vec3 vWorldDirection;\n#include \nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include \n\t#include \n\tgl_Position.z = gl_Position.w;\n}"; const fragment$g = "#ifdef ENVMAP_TYPE_CUBE\n\tuniform samplerCube envMap;\n#elif defined( ENVMAP_TYPE_CUBE_UV )\n\tuniform sampler2D envMap;\n#endif\nuniform float flipEnvMap;\nuniform float backgroundBlurriness;\nuniform float backgroundIntensity;\nuniform mat3 backgroundRotation;\nvarying vec3 vWorldDirection;\n#include \nvoid main() {\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 texColor = textureCube( envMap, backgroundRotation * vec3( flipEnvMap * vWorldDirection.x, vWorldDirection.yz ) );\n\t#elif defined( ENVMAP_TYPE_CUBE_UV )\n\t\tvec4 texColor = textureCubeUV( envMap, backgroundRotation * vWorldDirection, backgroundBlurriness );\n\t#else\n\t\tvec4 texColor = vec4( 0.0, 0.0, 0.0, 1.0 );\n\t#endif\n\ttexColor.rgb *= backgroundIntensity;\n\tgl_FragColor = texColor;\n\t#include \n\t#include \n}"; const vertex$f = "varying vec3 vWorldDirection;\n#include \nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include \n\t#include \n\tgl_Position.z = gl_Position.w;\n}"; const fragment$f = "uniform samplerCube tCube;\nuniform float tFlip;\nuniform float opacity;\nvarying vec3 vWorldDirection;\nvoid main() {\n\tvec4 texColor = textureCube( tCube, vec3( tFlip * vWorldDirection.x, vWorldDirection.yz ) );\n\tgl_FragColor = texColor;\n\tgl_FragColor.a *= opacity;\n\t#include \n\t#include \n}"; const vertex$e = "#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include \n\t\t#include \n\t\t#include \n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvHighPrecisionZW = gl_Position.zw;\n}"; const fragment$e = "#if DEPTH_PACKING == 
3200\n\tuniform float opacity;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include \n\t#if DEPTH_PACKING == 3200\n\t\tdiffuseColor.a = opacity;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tfloat fragCoordZ = 0.5 * vHighPrecisionZW[0] / vHighPrecisionZW[1] + 0.5;\n\t#if DEPTH_PACKING == 3200\n\t\tgl_FragColor = vec4( vec3( 1.0 - fragCoordZ ), opacity );\n\t#elif DEPTH_PACKING == 3201\n\t\tgl_FragColor = packDepthToRGBA( fragCoordZ );\n\t#elif DEPTH_PACKING == 3202\n\t\tgl_FragColor = vec4( packDepthToRGB( fragCoordZ ), 1.0 );\n\t#elif DEPTH_PACKING == 3203\n\t\tgl_FragColor = vec4( packDepthToRG( fragCoordZ ), 0.0, 1.0 );\n\t#endif\n}"; const vertex$d = "#define DISTANCE\nvarying vec3 vWorldPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include \n\t\t#include \n\t\t#include \n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvWorldPosition = worldPosition.xyz;\n}"; const fragment$d = "#define DISTANCE\nuniform vec3 referencePosition;\nuniform float nearDistance;\nuniform float farDistance;\nvarying vec3 vWorldPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main () {\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tfloat dist = length( vWorldPosition - referencePosition );\n\tdist = ( dist - nearDistance ) / ( farDistance - nearDistance );\n\tdist = saturate( dist );\n\tgl_FragColor = packDepthToRGBA( dist );\n}"; const vertex$c = "varying vec3 vWorldDirection;\n#include \nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include \n\t#include \n}"; const fragment$c = "uniform sampler2D tEquirect;\nvarying vec3 vWorldDirection;\n#include \nvoid main() {\n\tvec3 direction = normalize( vWorldDirection );\n\tvec2 sampleUV = equirectUv( direction );\n\tgl_FragColor = texture2D( tEquirect, sampleUV );\n\t#include \n\t#include \n}"; const vertex$b = "uniform float scale;\nattribute float lineDistance;\nvarying float vLineDistance;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvLineDistance = scale * lineDistance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$b = "uniform vec3 diffuse;\nuniform float opacity;\nuniform float dashSize;\nuniform float totalSize;\nvarying float vLineDistance;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tif ( mod( vLineDistance, totalSize ) > dashSize ) {\n\t\tdiscard;\n\t}\n\tvec3 outgoingLight = vec3( 0.0 );\n\t#include \n\t#include \n\t#include \n\toutgoingLight = diffuseColor.rgb;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$a = "#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#if defined ( USE_ENVMAP ) || defined ( USE_SKINNING )\n\t\t#include \n\t\t#include \n\t\t#include \n\t\t#include \n\t\t#include 
\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$a = "uniform vec3 diffuse;\nuniform float opacity;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel = texture2D( lightMap, vLightMapUv );\n\t\treflectedLight.indirectDiffuse += lightMapTexel.rgb * lightMapIntensity * RECIPROCAL_PI;\n\t#else\n\t\treflectedLight.indirectDiffuse += vec3( 1.0 );\n\t#endif\n\t#include \n\treflectedLight.indirectDiffuse *= diffuseColor.rgb;\n\tvec3 outgoingLight = reflectedLight.indirectDiffuse;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$9 = "#define LAMBERT\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$9 = "#define LAMBERT\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$8 = "#define MATCAP\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n}"; const fragment$8 = "#define MATCAP\nuniform vec3 diffuse;\nuniform float opacity;\nuniform sampler2D matcap;\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( 
diffuse, opacity );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 viewDir = normalize( vViewPosition );\n\tvec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );\n\tvec3 y = cross( viewDir, x );\n\tvec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5;\n\t#ifdef USE_MATCAP\n\t\tvec4 matcapColor = texture2D( matcap, uv );\n\t#else\n\t\tvec4 matcapColor = vec4( vec3( mix( 0.2, 0.8, uv.y ) ), 1.0 );\n\t#endif\n\tvec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$7 = "#define NORMAL\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvarying vec3 vViewPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvViewPosition = - mvPosition.xyz;\n#endif\n}"; const fragment$7 = "#define NORMAL\nuniform float opacity;\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP_TANGENTSPACE )\n\tvarying vec3 vViewPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( 0.0, 0.0, 0.0, opacity );\n\t#include \n\t#include \n\t#include \n\t#include \n\tgl_FragColor = vec4( packNormalToRGB( normal ), diffuseColor.a );\n\t#ifdef OPAQUE\n\t\tgl_FragColor.a = 1.0;\n\t#endif\n}"; const vertex$6 = "#define PHONG\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$6 = "#define PHONG\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$5 = "#define STANDARD\nvarying vec3 vViewPosition;\n#ifdef 
USE_TRANSMISSION\n\tvarying vec3 vWorldPosition;\n#endif\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n#ifdef USE_TRANSMISSION\n\tvWorldPosition = worldPosition.xyz;\n#endif\n}"; const fragment$5 = "#define STANDARD\n#ifdef PHYSICAL\n\t#define IOR\n\t#define USE_SPECULAR\n#endif\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float roughness;\nuniform float metalness;\nuniform float opacity;\n#ifdef IOR\n\tuniform float ior;\n#endif\n#ifdef USE_SPECULAR\n\tuniform float specularIntensity;\n\tuniform vec3 specularColor;\n\t#ifdef USE_SPECULAR_COLORMAP\n\t\tuniform sampler2D specularColorMap;\n\t#endif\n\t#ifdef USE_SPECULAR_INTENSITYMAP\n\t\tuniform sampler2D specularIntensityMap;\n\t#endif\n#endif\n#ifdef USE_CLEARCOAT\n\tuniform float clearcoat;\n\tuniform float clearcoatRoughness;\n#endif\n#ifdef USE_DISPERSION\n\tuniform float dispersion;\n#endif\n#ifdef USE_IRIDESCENCE\n\tuniform float iridescence;\n\tuniform float iridescenceIOR;\n\tuniform float iridescenceThicknessMinimum;\n\tuniform float iridescenceThicknessMaximum;\n#endif\n#ifdef USE_SHEEN\n\tuniform vec3 sheenColor;\n\tuniform float sheenRoughness;\n\t#ifdef USE_SHEEN_COLORMAP\n\t\tuniform sampler2D sheenColorMap;\n\t#endif\n\t#ifdef USE_SHEEN_ROUGHNESSMAP\n\t\tuniform sampler2D sheenRoughnessMap;\n\t#endif\n#endif\n#ifdef USE_ANISOTROPY\n\tuniform vec2 anisotropyVector;\n\t#ifdef USE_ANISOTROPYMAP\n\t\tuniform sampler2D anisotropyMap;\n\t#endif\n#endif\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 totalDiffuse = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse;\n\tvec3 totalSpecular = reflectedLight.directSpecular + reflectedLight.indirectSpecular;\n\t#include \n\tvec3 outgoingLight = totalDiffuse + totalSpecular + totalEmissiveRadiance;\n\t#ifdef USE_SHEEN\n\t\tfloat sheenEnergyComp = 1.0 - 0.157 * max3( material.sheenColor );\n\t\toutgoingLight = outgoingLight * sheenEnergyComp + sheenSpecularDirect + sheenSpecularIndirect;\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNVcc = saturate( dot( geometryClearcoatNormal, geometryViewDir ) );\n\t\tvec3 Fcc = F_Schlick( material.clearcoatF0, material.clearcoatF90, dotNVcc );\n\t\toutgoingLight = outgoingLight * ( 1.0 - material.clearcoat * Fcc ) + ( clearcoatSpecularDirect + clearcoatSpecularIndirect ) * material.clearcoat;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include 
\n\t#include \n\t#include \n}"; const vertex$4 = "#define TOON\nvarying vec3 vViewPosition;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvViewPosition = - mvPosition.xyz;\n\t#include \n\t#include \n\t#include \n}"; const fragment$4 = "#define TOON\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$3 = "uniform float size;\nuniform float scale;\n#include \n#include \n#include \n#include \n#include \n#include \n#ifdef USE_POINTS_UV\n\tvarying vec2 vUv;\n\tuniform mat3 uvTransform;\n#endif\nvoid main() {\n\t#ifdef USE_POINTS_UV\n\t\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\tgl_PointSize = size;\n\t#ifdef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );\n\t#endif\n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$3 = "uniform vec3 diffuse;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tvec3 outgoingLight = vec3( 0.0 );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\toutgoingLight = diffuseColor.rgb;\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const vertex$2 = "#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n}"; const fragment$2 = "uniform vec3 color;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\tgl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );\n\t#include \n\t#include \n\t#include \n}"; const vertex$1 = "uniform float rotation;\nuniform vec2 center;\n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\t#include \n\tvec4 mvPosition = modelViewMatrix[ 3 ];\n\tvec2 scale = vec2( length( modelMatrix[ 0 ].xyz ), length( modelMatrix[ 1 ].xyz ) );\n\t#ifndef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( 
projectionMatrix );\n\t\tif ( isPerspective ) scale *= - mvPosition.z;\n\t#endif\n\tvec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;\n\tvec2 rotatedPosition;\n\trotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;\n\trotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;\n\tmvPosition.xy += rotatedPosition;\n\tgl_Position = projectionMatrix * mvPosition;\n\t#include \n\t#include \n\t#include \n}"; const fragment$1 = "uniform vec3 diffuse;\nuniform float opacity;\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include \n\tvec3 outgoingLight = vec3( 0.0 );\n\t#include \n\t#include \n\t#include \n\t#include \n\t#include \n\toutgoingLight = diffuseColor.rgb;\n\t#include \n\t#include \n\t#include \n\t#include \n}"; const ShaderChunk = { alphahash_fragment: alphahash_fragment, alphahash_pars_fragment: alphahash_pars_fragment, alphamap_fragment: alphamap_fragment, alphamap_pars_fragment: alphamap_pars_fragment, alphatest_fragment: alphatest_fragment, alphatest_pars_fragment: alphatest_pars_fragment, aomap_fragment: aomap_fragment, aomap_pars_fragment: aomap_pars_fragment, batching_pars_vertex: batching_pars_vertex, batching_vertex: batching_vertex, begin_vertex: begin_vertex, beginnormal_vertex: beginnormal_vertex, bsdfs: bsdfs, iridescence_fragment: iridescence_fragment, bumpmap_pars_fragment: bumpmap_pars_fragment, clipping_planes_fragment: clipping_planes_fragment, clipping_planes_pars_fragment: clipping_planes_pars_fragment, clipping_planes_pars_vertex: clipping_planes_pars_vertex, clipping_planes_vertex: clipping_planes_vertex, color_fragment: color_fragment, color_pars_fragment: color_pars_fragment, color_pars_vertex: color_pars_vertex, color_vertex: color_vertex, common: common, cube_uv_reflection_fragment: cube_uv_reflection_fragment, defaultnormal_vertex: defaultnormal_vertex, displacementmap_pars_vertex: displacementmap_pars_vertex, displacementmap_vertex: displacementmap_vertex, emissivemap_fragment: emissivemap_fragment, emissivemap_pars_fragment: emissivemap_pars_fragment, colorspace_fragment: colorspace_fragment, colorspace_pars_fragment: colorspace_pars_fragment, envmap_fragment: envmap_fragment, envmap_common_pars_fragment: envmap_common_pars_fragment, envmap_pars_fragment: envmap_pars_fragment, envmap_pars_vertex: envmap_pars_vertex, envmap_physical_pars_fragment: envmap_physical_pars_fragment, envmap_vertex: envmap_vertex, fog_vertex: fog_vertex, fog_pars_vertex: fog_pars_vertex, fog_fragment: fog_fragment, fog_pars_fragment: fog_pars_fragment, gradientmap_pars_fragment: gradientmap_pars_fragment, lightmap_pars_fragment: lightmap_pars_fragment, lights_lambert_fragment: lights_lambert_fragment, lights_lambert_pars_fragment: lights_lambert_pars_fragment, lights_pars_begin: lights_pars_begin, lights_toon_fragment: lights_toon_fragment, lights_toon_pars_fragment: lights_toon_pars_fragment, lights_phong_fragment: lights_phong_fragment, lights_phong_pars_fragment: lights_phong_pars_fragment, lights_physical_fragment: lights_physical_fragment, lights_physical_pars_fragment: lights_physical_pars_fragment, lights_fragment_begin: lights_fragment_begin, lights_fragment_maps: lights_fragment_maps, lights_fragment_end: lights_fragment_end, logdepthbuf_fragment: logdepthbuf_fragment, logdepthbuf_pars_fragment: logdepthbuf_pars_fragment, logdepthbuf_pars_vertex: 
logdepthbuf_pars_vertex, logdepthbuf_vertex: logdepthbuf_vertex, map_fragment: map_fragment, map_pars_fragment: map_pars_fragment, map_particle_fragment: map_particle_fragment, map_particle_pars_fragment: map_particle_pars_fragment, metalnessmap_fragment: metalnessmap_fragment, metalnessmap_pars_fragment: metalnessmap_pars_fragment, morphinstance_vertex: morphinstance_vertex, morphcolor_vertex: morphcolor_vertex, morphnormal_vertex: morphnormal_vertex, morphtarget_pars_vertex: morphtarget_pars_vertex, morphtarget_vertex: morphtarget_vertex, normal_fragment_begin: normal_fragment_begin, normal_fragment_maps: normal_fragment_maps, normal_pars_fragment: normal_pars_fragment, normal_pars_vertex: normal_pars_vertex, normal_vertex: normal_vertex, normalmap_pars_fragment: normalmap_pars_fragment, clearcoat_normal_fragment_begin: clearcoat_normal_fragment_begin, clearcoat_normal_fragment_maps: clearcoat_normal_fragment_maps, clearcoat_pars_fragment: clearcoat_pars_fragment, iridescence_pars_fragment: iridescence_pars_fragment, opaque_fragment: opaque_fragment, packing: packing, premultiplied_alpha_fragment: premultiplied_alpha_fragment, project_vertex: project_vertex, dithering_fragment: dithering_fragment, dithering_pars_fragment: dithering_pars_fragment, roughnessmap_fragment: roughnessmap_fragment, roughnessmap_pars_fragment: roughnessmap_pars_fragment, shadowmap_pars_fragment: shadowmap_pars_fragment, shadowmap_pars_vertex: shadowmap_pars_vertex, shadowmap_vertex: shadowmap_vertex, shadowmask_pars_fragment: shadowmask_pars_fragment, skinbase_vertex: skinbase_vertex, skinning_pars_vertex: skinning_pars_vertex, skinning_vertex: skinning_vertex, skinnormal_vertex: skinnormal_vertex, specularmap_fragment: specularmap_fragment, specularmap_pars_fragment: specularmap_pars_fragment, tonemapping_fragment: tonemapping_fragment, tonemapping_pars_fragment: tonemapping_pars_fragment, transmission_fragment: transmission_fragment, transmission_pars_fragment: transmission_pars_fragment, uv_pars_fragment: uv_pars_fragment, uv_pars_vertex: uv_pars_vertex, uv_vertex: uv_vertex, worldpos_vertex: worldpos_vertex, background_vert: vertex$h, background_frag: fragment$h, backgroundCube_vert: vertex$g, backgroundCube_frag: fragment$g, cube_vert: vertex$f, cube_frag: fragment$f, depth_vert: vertex$e, depth_frag: fragment$e, distanceRGBA_vert: vertex$d, distanceRGBA_frag: fragment$d, equirect_vert: vertex$c, equirect_frag: fragment$c, linedashed_vert: vertex$b, linedashed_frag: fragment$b, meshbasic_vert: vertex$a, meshbasic_frag: fragment$a, meshlambert_vert: vertex$9, meshlambert_frag: fragment$9, meshmatcap_vert: vertex$8, meshmatcap_frag: fragment$8, meshnormal_vert: vertex$7, meshnormal_frag: fragment$7, meshphong_vert: vertex$6, meshphong_frag: fragment$6, meshphysical_vert: vertex$5, meshphysical_frag: fragment$5, meshtoon_vert: vertex$4, meshtoon_frag: fragment$4, points_vert: vertex$3, points_frag: fragment$3, shadow_vert: vertex$2, shadow_frag: fragment$2, sprite_vert: vertex$1, sprite_frag: fragment$1 }; // Uniforms library for shared webgl shaders const UniformsLib = { common: { diffuse: { value: /*@__PURE__*/ new Color( 0xffffff ) }, opacity: { value: 1.0 }, map: { value: null }, mapTransform: { value: /*@__PURE__*/ new Matrix3() }, alphaMap: { value: null }, alphaMapTransform: { value: /*@__PURE__*/ new Matrix3() }, alphaTest: { value: 0 } }, specularmap: { specularMap: { value: null }, specularMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, envmap: { envMap: { value: null }, envMapRotation: { 
value: /*@__PURE__*/ new Matrix3() }, flipEnvMap: { value: -1 }, reflectivity: { value: 1.0 }, // basic, lambert, phong ior: { value: 1.5 }, // physical refractionRatio: { value: 0.98 }, // basic, lambert, phong }, aomap: { aoMap: { value: null }, aoMapIntensity: { value: 1 }, aoMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, lightmap: { lightMap: { value: null }, lightMapIntensity: { value: 1 }, lightMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, bumpmap: { bumpMap: { value: null }, bumpMapTransform: { value: /*@__PURE__*/ new Matrix3() }, bumpScale: { value: 1 } }, normalmap: { normalMap: { value: null }, normalMapTransform: { value: /*@__PURE__*/ new Matrix3() }, normalScale: { value: /*@__PURE__*/ new Vector2( 1, 1 ) } }, displacementmap: { displacementMap: { value: null }, displacementMapTransform: { value: /*@__PURE__*/ new Matrix3() }, displacementScale: { value: 1 }, displacementBias: { value: 0 } }, emissivemap: { emissiveMap: { value: null }, emissiveMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, metalnessmap: { metalnessMap: { value: null }, metalnessMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, roughnessmap: { roughnessMap: { value: null }, roughnessMapTransform: { value: /*@__PURE__*/ new Matrix3() } }, gradientmap: { gradientMap: { value: null } }, fog: { fogDensity: { value: 0.00025 }, fogNear: { value: 1 }, fogFar: { value: 2000 }, fogColor: { value: /*@__PURE__*/ new Color( 0xffffff ) } }, lights: { ambientLightColor: { value: [] }, lightProbe: { value: [] }, directionalLights: { value: [], properties: { direction: {}, color: {} } }, directionalLightShadows: { value: [], properties: { shadowIntensity: 1, shadowBias: {}, shadowNormalBias: {}, shadowRadius: {}, shadowMapSize: {} } }, directionalShadowMap: { value: [] }, directionalShadowMatrix: { value: [] }, spotLights: { value: [], properties: { color: {}, position: {}, direction: {}, distance: {}, coneCos: {}, penumbraCos: {}, decay: {} } }, spotLightShadows: { value: [], properties: { shadowIntensity: 1, shadowBias: {}, shadowNormalBias: {}, shadowRadius: {}, shadowMapSize: {} } }, spotLightMap: { value: [] }, spotShadowMap: { value: [] }, spotLightMatrix: { value: [] }, pointLights: { value: [], properties: { color: {}, position: {}, decay: {}, distance: {} } }, pointLightShadows: { value: [], properties: { shadowIntensity: 1, shadowBias: {}, shadowNormalBias: {}, shadowRadius: {}, shadowMapSize: {}, shadowCameraNear: {}, shadowCameraFar: {} } }, pointShadowMap: { value: [] }, pointShadowMatrix: { value: [] }, hemisphereLights: { value: [], properties: { direction: {}, skyColor: {}, groundColor: {} } }, // TODO (abelnation): RectAreaLight BRDF data needs to be moved from example to main src rectAreaLights: { value: [], properties: { color: {}, position: {}, width: {}, height: {} } }, ltc_1: { value: null }, ltc_2: { value: null } }, points: { diffuse: { value: /*@__PURE__*/ new Color( 0xffffff ) }, opacity: { value: 1.0 }, size: { value: 1.0 }, scale: { value: 1.0 }, map: { value: null }, alphaMap: { value: null }, alphaMapTransform: { value: /*@__PURE__*/ new Matrix3() }, alphaTest: { value: 0 }, uvTransform: { value: /*@__PURE__*/ new Matrix3() } }, sprite: { diffuse: { value: /*@__PURE__*/ new Color( 0xffffff ) }, opacity: { value: 1.0 }, center: { value: /*@__PURE__*/ new Vector2( 0.5, 0.5 ) }, rotation: { value: 0.0 }, map: { value: null }, mapTransform: { value: /*@__PURE__*/ new Matrix3() }, alphaMap: { value: null }, alphaMapTransform: { value: /*@__PURE__*/ new Matrix3() }, 
alphaTest: { value: 0 } } }; const ShaderLib = { basic: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.specularmap, UniformsLib.envmap, UniformsLib.aomap, UniformsLib.lightmap, UniformsLib.fog ] ), vertexShader: ShaderChunk.meshbasic_vert, fragmentShader: ShaderChunk.meshbasic_frag }, lambert: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.specularmap, UniformsLib.envmap, UniformsLib.aomap, UniformsLib.lightmap, UniformsLib.emissivemap, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, UniformsLib.fog, UniformsLib.lights, { emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) } } ] ), vertexShader: ShaderChunk.meshlambert_vert, fragmentShader: ShaderChunk.meshlambert_frag }, phong: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.specularmap, UniformsLib.envmap, UniformsLib.aomap, UniformsLib.lightmap, UniformsLib.emissivemap, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, UniformsLib.fog, UniformsLib.lights, { emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) }, specular: { value: /*@__PURE__*/ new Color( 0x111111 ) }, shininess: { value: 30 } } ] ), vertexShader: ShaderChunk.meshphong_vert, fragmentShader: ShaderChunk.meshphong_frag }, standard: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.envmap, UniformsLib.aomap, UniformsLib.lightmap, UniformsLib.emissivemap, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, UniformsLib.roughnessmap, UniformsLib.metalnessmap, UniformsLib.fog, UniformsLib.lights, { emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) }, roughness: { value: 1.0 }, metalness: { value: 0.0 }, envMapIntensity: { value: 1 } } ] ), vertexShader: ShaderChunk.meshphysical_vert, fragmentShader: ShaderChunk.meshphysical_frag }, toon: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.aomap, UniformsLib.lightmap, UniformsLib.emissivemap, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, UniformsLib.gradientmap, UniformsLib.fog, UniformsLib.lights, { emissive: { value: /*@__PURE__*/ new Color( 0x000000 ) } } ] ), vertexShader: ShaderChunk.meshtoon_vert, fragmentShader: ShaderChunk.meshtoon_frag }, matcap: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, UniformsLib.fog, { matcap: { value: null } } ] ), vertexShader: ShaderChunk.meshmatcap_vert, fragmentShader: ShaderChunk.meshmatcap_frag }, points: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.points, UniformsLib.fog ] ), vertexShader: ShaderChunk.points_vert, fragmentShader: ShaderChunk.points_frag }, dashed: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.fog, { scale: { value: 1 }, dashSize: { value: 1 }, totalSize: { value: 2 } } ] ), vertexShader: ShaderChunk.linedashed_vert, fragmentShader: ShaderChunk.linedashed_frag }, depth: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.displacementmap ] ), vertexShader: ShaderChunk.depth_vert, fragmentShader: ShaderChunk.depth_frag }, normal: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.bumpmap, UniformsLib.normalmap, UniformsLib.displacementmap, { opacity: { value: 1.0 } } ] ), vertexShader: ShaderChunk.meshnormal_vert, fragmentShader: ShaderChunk.meshnormal_frag }, sprite: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.sprite, UniformsLib.fog ] ), vertexShader: 
ShaderChunk.sprite_vert, fragmentShader: ShaderChunk.sprite_frag }, background: { uniforms: { uvTransform: { value: /*@__PURE__*/ new Matrix3() }, t2D: { value: null }, backgroundIntensity: { value: 1 } }, vertexShader: ShaderChunk.background_vert, fragmentShader: ShaderChunk.background_frag }, backgroundCube: { uniforms: { envMap: { value: null }, flipEnvMap: { value: -1 }, backgroundBlurriness: { value: 0 }, backgroundIntensity: { value: 1 }, backgroundRotation: { value: /*@__PURE__*/ new Matrix3() } }, vertexShader: ShaderChunk.backgroundCube_vert, fragmentShader: ShaderChunk.backgroundCube_frag }, cube: { uniforms: { tCube: { value: null }, tFlip: { value: -1 }, opacity: { value: 1.0 } }, vertexShader: ShaderChunk.cube_vert, fragmentShader: ShaderChunk.cube_frag }, equirect: { uniforms: { tEquirect: { value: null }, }, vertexShader: ShaderChunk.equirect_vert, fragmentShader: ShaderChunk.equirect_frag }, distanceRGBA: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.common, UniformsLib.displacementmap, { referencePosition: { value: /*@__PURE__*/ new Vector3() }, nearDistance: { value: 1 }, farDistance: { value: 1000 } } ] ), vertexShader: ShaderChunk.distanceRGBA_vert, fragmentShader: ShaderChunk.distanceRGBA_frag }, shadow: { uniforms: /*@__PURE__*/ mergeUniforms( [ UniformsLib.lights, UniformsLib.fog, { color: { value: /*@__PURE__*/ new Color( 0x00000 ) }, opacity: { value: 1.0 } }, ] ), vertexShader: ShaderChunk.shadow_vert, fragmentShader: ShaderChunk.shadow_frag } }; ShaderLib.physical = { uniforms: /*@__PURE__*/ mergeUniforms( [ ShaderLib.standard.uniforms, { clearcoat: { value: 0 }, clearcoatMap: { value: null }, clearcoatMapTransform: { value: /*@__PURE__*/ new Matrix3() }, clearcoatNormalMap: { value: null }, clearcoatNormalMapTransform: { value: /*@__PURE__*/ new Matrix3() }, clearcoatNormalScale: { value: /*@__PURE__*/ new Vector2( 1, 1 ) }, clearcoatRoughness: { value: 0 }, clearcoatRoughnessMap: { value: null }, clearcoatRoughnessMapTransform: { value: /*@__PURE__*/ new Matrix3() }, dispersion: { value: 0 }, iridescence: { value: 0 }, iridescenceMap: { value: null }, iridescenceMapTransform: { value: /*@__PURE__*/ new Matrix3() }, iridescenceIOR: { value: 1.3 }, iridescenceThicknessMinimum: { value: 100 }, iridescenceThicknessMaximum: { value: 400 }, iridescenceThicknessMap: { value: null }, iridescenceThicknessMapTransform: { value: /*@__PURE__*/ new Matrix3() }, sheen: { value: 0 }, sheenColor: { value: /*@__PURE__*/ new Color( 0x000000 ) }, sheenColorMap: { value: null }, sheenColorMapTransform: { value: /*@__PURE__*/ new Matrix3() }, sheenRoughness: { value: 1 }, sheenRoughnessMap: { value: null }, sheenRoughnessMapTransform: { value: /*@__PURE__*/ new Matrix3() }, transmission: { value: 0 }, transmissionMap: { value: null }, transmissionMapTransform: { value: /*@__PURE__*/ new Matrix3() }, transmissionSamplerSize: { value: /*@__PURE__*/ new Vector2() }, transmissionSamplerMap: { value: null }, thickness: { value: 0 }, thicknessMap: { value: null }, thicknessMapTransform: { value: /*@__PURE__*/ new Matrix3() }, attenuationDistance: { value: 0 }, attenuationColor: { value: /*@__PURE__*/ new Color( 0x000000 ) }, specularColor: { value: /*@__PURE__*/ new Color( 1, 1, 1 ) }, specularColorMap: { value: null }, specularColorMapTransform: { value: /*@__PURE__*/ new Matrix3() }, specularIntensity: { value: 1 }, specularIntensityMap: { value: null }, specularIntensityMapTransform: { value: /*@__PURE__*/ new Matrix3() }, anisotropyVector: { value: /*@__PURE__*/ new 
Vector2() }, anisotropyMap: { value: null }, anisotropyMapTransform: { value: /*@__PURE__*/ new Matrix3() }, } ] ), vertexShader: ShaderChunk.meshphysical_vert, fragmentShader: ShaderChunk.meshphysical_frag }; const _rgb = { r: 0, b: 0, g: 0 }; const _e1$1 = /*@__PURE__*/ new Euler(); const _m1$1 = /*@__PURE__*/ new Matrix4(); function WebGLBackground( renderer, cubemaps, cubeuvmaps, state, objects, alpha, premultipliedAlpha ) { const clearColor = new Color( 0x000000 ); let clearAlpha = alpha === true ? 0 : 1; let planeMesh; let boxMesh; let currentBackground = null; let currentBackgroundVersion = 0; let currentTonemapping = null; function getBackground( scene ) { let background = scene.isScene === true ? scene.background : null; if ( background && background.isTexture ) { const usePMREM = scene.backgroundBlurriness > 0; // use PMREM if the user wants to blur the background background = ( usePMREM ? cubeuvmaps : cubemaps ).get( background ); } return background; } function render( scene ) { let forceClear = false; const background = getBackground( scene ); if ( background === null ) { setClear( clearColor, clearAlpha ); } else if ( background && background.isColor ) { setClear( background, 1 ); forceClear = true; } const environmentBlendMode = renderer.xr.getEnvironmentBlendMode(); if ( environmentBlendMode === 'additive' ) { state.buffers.color.setClear( 0, 0, 0, 1, premultipliedAlpha ); } else if ( environmentBlendMode === 'alpha-blend' ) { state.buffers.color.setClear( 0, 0, 0, 0, premultipliedAlpha ); } if ( renderer.autoClear || forceClear ) { // buffers might not be writable which is required to ensure a correct clear state.buffers.depth.setTest( true ); state.buffers.depth.setMask( true ); state.buffers.color.setMask( true ); renderer.clear( renderer.autoClearColor, renderer.autoClearDepth, renderer.autoClearStencil ); } } function addToRenderList( renderList, scene ) { const background = getBackground( scene ); if ( background && ( background.isCubeTexture || background.mapping === CubeUVReflectionMapping ) ) { if ( boxMesh === undefined ) { boxMesh = new Mesh( new BoxGeometry( 1, 1, 1 ), new ShaderMaterial( { name: 'BackgroundCubeMaterial', uniforms: cloneUniforms( ShaderLib.backgroundCube.uniforms ), vertexShader: ShaderLib.backgroundCube.vertexShader, fragmentShader: ShaderLib.backgroundCube.fragmentShader, side: BackSide, depthTest: false, depthWrite: false, fog: false, allowOverride: false } ) ); boxMesh.geometry.deleteAttribute( 'normal' ); boxMesh.geometry.deleteAttribute( 'uv' ); boxMesh.onBeforeRender = function ( renderer, scene, camera ) { this.matrixWorld.copyPosition( camera.matrixWorld ); }; // add "envMap" material property so the renderer can evaluate it like for built-in materials Object.defineProperty( boxMesh.material, 'envMap', { get: function () { return this.uniforms.envMap.value; } } ); objects.update( boxMesh ); } _e1$1.copy( scene.backgroundRotation ); // accommodate left-handed frame _e1$1.x *= -1; _e1$1.y *= -1; _e1$1.z *= -1; if ( background.isCubeTexture && background.isRenderTargetTexture === false ) { // environment maps which are not cube render targets or PMREMs follow a different convention _e1$1.y *= -1; _e1$1.z *= -1; } boxMesh.material.uniforms.envMap.value = background; boxMesh.material.uniforms.flipEnvMap.value = ( background.isCubeTexture && background.isRenderTargetTexture === false ) ? 
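// Usage sketch (illustrative; assumes an existing Scene `scene` and a loaded equirectangular
// Texture `texture`). These are the public Scene properties that WebGLBackground reads back
// each frame:
//
//   texture.mapping = EquirectangularReflectionMapping;
//   scene.background = texture;           // drawn with the plane/box meshes created here
//   scene.backgroundBlurriness = 0.3;     // > 0 routes the texture through the PMREM (cubeUV) path
//   scene.backgroundIntensity = 1.5;      // scales the sampled background color
//   scene.backgroundRotation.y = Math.PI; // Euler rotation, applied to cube-type backgrounds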
-1 : 1; boxMesh.material.uniforms.backgroundBlurriness.value = scene.backgroundBlurriness; boxMesh.material.uniforms.backgroundIntensity.value = scene.backgroundIntensity; boxMesh.material.uniforms.backgroundRotation.value.setFromMatrix4( _m1$1.makeRotationFromEuler( _e1$1 ) ); boxMesh.material.toneMapped = ColorManagement.getTransfer( background.colorSpace ) !== SRGBTransfer; if ( currentBackground !== background || currentBackgroundVersion !== background.version || currentTonemapping !== renderer.toneMapping ) { boxMesh.material.needsUpdate = true; currentBackground = background; currentBackgroundVersion = background.version; currentTonemapping = renderer.toneMapping; } boxMesh.layers.enableAll(); // push to the pre-sorted opaque render list renderList.unshift( boxMesh, boxMesh.geometry, boxMesh.material, 0, 0, null ); } else if ( background && background.isTexture ) { if ( planeMesh === undefined ) { planeMesh = new Mesh( new PlaneGeometry( 2, 2 ), new ShaderMaterial( { name: 'BackgroundMaterial', uniforms: cloneUniforms( ShaderLib.background.uniforms ), vertexShader: ShaderLib.background.vertexShader, fragmentShader: ShaderLib.background.fragmentShader, side: FrontSide, depthTest: false, depthWrite: false, fog: false, allowOverride: false } ) ); planeMesh.geometry.deleteAttribute( 'normal' ); // add "map" material property so the renderer can evaluate it like for built-in materials Object.defineProperty( planeMesh.material, 'map', { get: function () { return this.uniforms.t2D.value; } } ); objects.update( planeMesh ); } planeMesh.material.uniforms.t2D.value = background; planeMesh.material.uniforms.backgroundIntensity.value = scene.backgroundIntensity; planeMesh.material.toneMapped = ColorManagement.getTransfer( background.colorSpace ) !== SRGBTransfer; if ( background.matrixAutoUpdate === true ) { background.updateMatrix(); } planeMesh.material.uniforms.uvTransform.value.copy( background.matrix ); if ( currentBackground !== background || currentBackgroundVersion !== background.version || currentTonemapping !== renderer.toneMapping ) { planeMesh.material.needsUpdate = true; currentBackground = background; currentBackgroundVersion = background.version; currentTonemapping = renderer.toneMapping; } planeMesh.layers.enableAll(); // push to the pre-sorted opaque render list renderList.unshift( planeMesh, planeMesh.geometry, planeMesh.material, 0, 0, null ); } } function setClear( color, alpha ) { color.getRGB( _rgb, getUnlitUniformColorSpace( renderer ) ); state.buffers.color.setClear( _rgb.r, _rgb.g, _rgb.b, alpha, premultipliedAlpha ); } function dispose() { if ( boxMesh !== undefined ) { boxMesh.geometry.dispose(); boxMesh.material.dispose(); boxMesh = undefined; } if ( planeMesh !== undefined ) { planeMesh.geometry.dispose(); planeMesh.material.dispose(); planeMesh = undefined; } } return { getClearColor: function () { return clearColor; }, setClearColor: function ( color, alpha = 1 ) { clearColor.set( color ); clearAlpha = alpha; setClear( clearColor, clearAlpha ); }, getClearAlpha: function () { return clearAlpha; }, setClearAlpha: function ( alpha ) { clearAlpha = alpha; setClear( clearColor, clearAlpha ); }, render: render, addToRenderList: addToRenderList, dispose: dispose }; } function WebGLBindingStates( gl, attributes ) { const maxVertexAttributes = gl.getParameter( gl.MAX_VERTEX_ATTRIBS ); const bindingStates = {}; const defaultState = createBindingState( null ); let currentState = defaultState; let forceUpdate = false; function setup( object, material, program, geometry, index 
) { let updateBuffers = false; const state = getBindingState( geometry, program, material ); if ( currentState !== state ) { currentState = state; bindVertexArrayObject( currentState.object ); } updateBuffers = needsUpdate( object, geometry, program, index ); if ( updateBuffers ) saveCache( object, geometry, program, index ); if ( index !== null ) { attributes.update( index, gl.ELEMENT_ARRAY_BUFFER ); } if ( updateBuffers || forceUpdate ) { forceUpdate = false; setupVertexAttributes( object, material, program, geometry ); if ( index !== null ) { gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, attributes.get( index ).buffer ); } } } function createVertexArrayObject() { return gl.createVertexArray(); } function bindVertexArrayObject( vao ) { return gl.bindVertexArray( vao ); } function deleteVertexArrayObject( vao ) { return gl.deleteVertexArray( vao ); } function getBindingState( geometry, program, material ) { const wireframe = ( material.wireframe === true ); let programMap = bindingStates[ geometry.id ]; if ( programMap === undefined ) { programMap = {}; bindingStates[ geometry.id ] = programMap; } let stateMap = programMap[ program.id ]; if ( stateMap === undefined ) { stateMap = {}; programMap[ program.id ] = stateMap; } let state = stateMap[ wireframe ]; if ( state === undefined ) { state = createBindingState( createVertexArrayObject() ); stateMap[ wireframe ] = state; } return state; } function createBindingState( vao ) { const newAttributes = []; const enabledAttributes = []; const attributeDivisors = []; for ( let i = 0; i < maxVertexAttributes; i ++ ) { newAttributes[ i ] = 0; enabledAttributes[ i ] = 0; attributeDivisors[ i ] = 0; } return { // for backward compatibility on non-VAO support browser geometry: null, program: null, wireframe: false, newAttributes: newAttributes, enabledAttributes: enabledAttributes, attributeDivisors: attributeDivisors, object: vao, attributes: {}, index: null }; } function needsUpdate( object, geometry, program, index ) { const cachedAttributes = currentState.attributes; const geometryAttributes = geometry.attributes; let attributesNum = 0; const programAttributes = program.getAttributes(); for ( const name in programAttributes ) { const programAttribute = programAttributes[ name ]; if ( programAttribute.location >= 0 ) { const cachedAttribute = cachedAttributes[ name ]; let geometryAttribute = geometryAttributes[ name ]; if ( geometryAttribute === undefined ) { if ( name === 'instanceMatrix' && object.instanceMatrix ) geometryAttribute = object.instanceMatrix; if ( name === 'instanceColor' && object.instanceColor ) geometryAttribute = object.instanceColor; } if ( cachedAttribute === undefined ) return true; if ( cachedAttribute.attribute !== geometryAttribute ) return true; if ( geometryAttribute && cachedAttribute.data !== geometryAttribute.data ) return true; attributesNum ++; } } if ( currentState.attributesNum !== attributesNum ) return true; if ( currentState.index !== index ) return true; return false; } function saveCache( object, geometry, program, index ) { const cache = {}; const attributes = geometry.attributes; let attributesNum = 0; const programAttributes = program.getAttributes(); for ( const name in programAttributes ) { const programAttribute = programAttributes[ name ]; if ( programAttribute.location >= 0 ) { let attribute = attributes[ name ]; if ( attribute === undefined ) { if ( name === 'instanceMatrix' && object.instanceMatrix ) attribute = object.instanceMatrix; if ( name === 'instanceColor' && object.instanceColor ) attribute = 
object.instanceColor; } const data = {}; data.attribute = attribute; if ( attribute && attribute.data ) { data.data = attribute.data; } cache[ name ] = data; attributesNum ++; } } currentState.attributes = cache; currentState.attributesNum = attributesNum; currentState.index = index; } function initAttributes() { const newAttributes = currentState.newAttributes; for ( let i = 0, il = newAttributes.length; i < il; i ++ ) { newAttributes[ i ] = 0; } } function enableAttribute( attribute ) { enableAttributeAndDivisor( attribute, 0 ); } function enableAttributeAndDivisor( attribute, meshPerAttribute ) { const newAttributes = currentState.newAttributes; const enabledAttributes = currentState.enabledAttributes; const attributeDivisors = currentState.attributeDivisors; newAttributes[ attribute ] = 1; if ( enabledAttributes[ attribute ] === 0 ) { gl.enableVertexAttribArray( attribute ); enabledAttributes[ attribute ] = 1; } if ( attributeDivisors[ attribute ] !== meshPerAttribute ) { gl.vertexAttribDivisor( attribute, meshPerAttribute ); attributeDivisors[ attribute ] = meshPerAttribute; } } function disableUnusedAttributes() { const newAttributes = currentState.newAttributes; const enabledAttributes = currentState.enabledAttributes; for ( let i = 0, il = enabledAttributes.length; i < il; i ++ ) { if ( enabledAttributes[ i ] !== newAttributes[ i ] ) { gl.disableVertexAttribArray( i ); enabledAttributes[ i ] = 0; } } } function vertexAttribPointer( index, size, type, normalized, stride, offset, integer ) { if ( integer === true ) { gl.vertexAttribIPointer( index, size, type, stride, offset ); } else { gl.vertexAttribPointer( index, size, type, normalized, stride, offset ); } } function setupVertexAttributes( object, material, program, geometry ) { initAttributes(); const geometryAttributes = geometry.attributes; const programAttributes = program.getAttributes(); const materialDefaultAttributeValues = material.defaultAttributeValues; for ( const name in programAttributes ) { const programAttribute = programAttributes[ name ]; if ( programAttribute.location >= 0 ) { let geometryAttribute = geometryAttributes[ name ]; if ( geometryAttribute === undefined ) { if ( name === 'instanceMatrix' && object.instanceMatrix ) geometryAttribute = object.instanceMatrix; if ( name === 'instanceColor' && object.instanceColor ) geometryAttribute = object.instanceColor; } if ( geometryAttribute !== undefined ) { const normalized = geometryAttribute.normalized; const size = geometryAttribute.itemSize; const attribute = attributes.get( geometryAttribute ); // TODO Attribute may not be available on context restore if ( attribute === undefined ) continue; const buffer = attribute.buffer; const type = attribute.type; const bytesPerElement = attribute.bytesPerElement; // check for integer attributes const integer = ( type === gl.INT || type === gl.UNSIGNED_INT || geometryAttribute.gpuType === IntType ); if ( geometryAttribute.isInterleavedBufferAttribute ) { const data = geometryAttribute.data; const stride = data.stride; const offset = geometryAttribute.offset; if ( data.isInstancedInterleavedBuffer ) { for ( let i = 0; i < programAttribute.locationSize; i ++ ) { enableAttributeAndDivisor( programAttribute.location + i, data.meshPerAttribute ); } if ( object.isInstancedMesh !== true && geometry._maxInstanceCount === undefined ) { geometry._maxInstanceCount = data.meshPerAttribute * data.count; } } else { for ( let i = 0; i < programAttribute.locationSize; i ++ ) { enableAttribute( programAttribute.location + i ); } } 
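// Context sketch (illustrative, not part of the library source). The divisor handling above is
// what per-instance attributes end up driving; for example, assuming an InstancedBufferGeometry
// `geometry` and an instance count `count`,
//
//   const offsets = new InstancedBufferAttribute( new Float32Array( count * 3 ), 3 );
//   geometry.setAttribute( 'offset', offsets );
//
// is enabled with gl.vertexAttribDivisor( location, meshPerAttribute ), while InstancedMesh
// routes its instanceMatrix / instanceColor through the same path via the name checks above.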
gl.bindBuffer( gl.ARRAY_BUFFER, buffer ); for ( let i = 0; i < programAttribute.locationSize; i ++ ) { vertexAttribPointer( programAttribute.location + i, size / programAttribute.locationSize, type, normalized, stride * bytesPerElement, ( offset + ( size / programAttribute.locationSize ) * i ) * bytesPerElement, integer ); } } else { if ( geometryAttribute.isInstancedBufferAttribute ) { for ( let i = 0; i < programAttribute.locationSize; i ++ ) { enableAttributeAndDivisor( programAttribute.location + i, geometryAttribute.meshPerAttribute ); } if ( object.isInstancedMesh !== true && geometry._maxInstanceCount === undefined ) { geometry._maxInstanceCount = geometryAttribute.meshPerAttribute * geometryAttribute.count; } } else { for ( let i = 0; i < programAttribute.locationSize; i ++ ) { enableAttribute( programAttribute.location + i ); } } gl.bindBuffer( gl.ARRAY_BUFFER, buffer ); for ( let i = 0; i < programAttribute.locationSize; i ++ ) { vertexAttribPointer( programAttribute.location + i, size / programAttribute.locationSize, type, normalized, size * bytesPerElement, ( size / programAttribute.locationSize ) * i * bytesPerElement, integer ); } } } else if ( materialDefaultAttributeValues !== undefined ) { const value = materialDefaultAttributeValues[ name ]; if ( value !== undefined ) { switch ( value.length ) { case 2: gl.vertexAttrib2fv( programAttribute.location, value ); break; case 3: gl.vertexAttrib3fv( programAttribute.location, value ); break; case 4: gl.vertexAttrib4fv( programAttribute.location, value ); break; default: gl.vertexAttrib1fv( programAttribute.location, value ); } } } } } disableUnusedAttributes(); } function dispose() { reset(); for ( const geometryId in bindingStates ) { const programMap = bindingStates[ geometryId ]; for ( const programId in programMap ) { const stateMap = programMap[ programId ]; for ( const wireframe in stateMap ) { deleteVertexArrayObject( stateMap[ wireframe ].object ); delete stateMap[ wireframe ]; } delete programMap[ programId ]; } delete bindingStates[ geometryId ]; } } function releaseStatesOfGeometry( geometry ) { if ( bindingStates[ geometry.id ] === undefined ) return; const programMap = bindingStates[ geometry.id ]; for ( const programId in programMap ) { const stateMap = programMap[ programId ]; for ( const wireframe in stateMap ) { deleteVertexArrayObject( stateMap[ wireframe ].object ); delete stateMap[ wireframe ]; } delete programMap[ programId ]; } delete bindingStates[ geometry.id ]; } function releaseStatesOfProgram( program ) { for ( const geometryId in bindingStates ) { const programMap = bindingStates[ geometryId ]; if ( programMap[ program.id ] === undefined ) continue; const stateMap = programMap[ program.id ]; for ( const wireframe in stateMap ) { deleteVertexArrayObject( stateMap[ wireframe ].object ); delete stateMap[ wireframe ]; } delete programMap[ program.id ]; } } function reset() { resetDefaultState(); forceUpdate = true; if ( currentState === defaultState ) return; currentState = defaultState; bindVertexArrayObject( currentState.object ); } // for backward-compatibility function resetDefaultState() { defaultState.geometry = null; defaultState.program = null; defaultState.wireframe = false; } return { setup: setup, reset: reset, resetDefaultState: resetDefaultState, dispose: dispose, releaseStatesOfGeometry: releaseStatesOfGeometry, releaseStatesOfProgram: releaseStatesOfProgram, initAttributes: initAttributes, enableAttribute: enableAttribute, disableUnusedAttributes: disableUnusedAttributes }; } function 
WebGLBufferRenderer( gl, extensions, info ) { let mode; function setMode( value ) { mode = value; } function render( start, count ) { gl.drawArrays( mode, start, count ); info.update( count, mode, 1 ); } function renderInstances( start, count, primcount ) { if ( primcount === 0 ) return; gl.drawArraysInstanced( mode, start, count, primcount ); info.update( count, mode, primcount ); } function renderMultiDraw( starts, counts, drawCount ) { if ( drawCount === 0 ) return; const extension = extensions.get( 'WEBGL_multi_draw' ); extension.multiDrawArraysWEBGL( mode, starts, 0, counts, 0, drawCount ); let elementCount = 0; for ( let i = 0; i < drawCount; i ++ ) { elementCount += counts[ i ]; } info.update( elementCount, mode, 1 ); } function renderMultiDrawInstances( starts, counts, drawCount, primcount ) { if ( drawCount === 0 ) return; const extension = extensions.get( 'WEBGL_multi_draw' ); if ( extension === null ) { for ( let i = 0; i < starts.length; i ++ ) { renderInstances( starts[ i ], counts[ i ], primcount[ i ] ); } } else { extension.multiDrawArraysInstancedWEBGL( mode, starts, 0, counts, 0, primcount, 0, drawCount ); let elementCount = 0; for ( let i = 0; i < drawCount; i ++ ) { elementCount += counts[ i ] * primcount[ i ]; } info.update( elementCount, mode, 1 ); } } // this.setMode = setMode; this.render = render; this.renderInstances = renderInstances; this.renderMultiDraw = renderMultiDraw; this.renderMultiDrawInstances = renderMultiDrawInstances; } function WebGLCapabilities( gl, extensions, parameters, utils ) { let maxAnisotropy; function getMaxAnisotropy() { if ( maxAnisotropy !== undefined ) return maxAnisotropy; if ( extensions.has( 'EXT_texture_filter_anisotropic' ) === true ) { const extension = extensions.get( 'EXT_texture_filter_anisotropic' ); maxAnisotropy = gl.getParameter( extension.MAX_TEXTURE_MAX_ANISOTROPY_EXT ); } else { maxAnisotropy = 0; } return maxAnisotropy; } function textureFormatReadable( textureFormat ) { if ( textureFormat !== RGBAFormat && utils.convert( textureFormat ) !== gl.getParameter( gl.IMPLEMENTATION_COLOR_READ_FORMAT ) ) { return false; } return true; } function textureTypeReadable( textureType ) { const halfFloatSupportedByExt = ( textureType === HalfFloatType ) && ( extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ) ); if ( textureType !== UnsignedByteType && utils.convert( textureType ) !== gl.getParameter( gl.IMPLEMENTATION_COLOR_READ_TYPE ) && // Edge and Chrome Mac < 52 (#9513) textureType !== FloatType && ! halfFloatSupportedByExt ) { return false; } return true; } function getMaxPrecision( precision ) { if ( precision === 'highp' ) { if ( gl.getShaderPrecisionFormat( gl.VERTEX_SHADER, gl.HIGH_FLOAT ).precision > 0 && gl.getShaderPrecisionFormat( gl.FRAGMENT_SHADER, gl.HIGH_FLOAT ).precision > 0 ) { return 'highp'; } precision = 'mediump'; } if ( precision === 'mediump' ) { if ( gl.getShaderPrecisionFormat( gl.VERTEX_SHADER, gl.MEDIUM_FLOAT ).precision > 0 && gl.getShaderPrecisionFormat( gl.FRAGMENT_SHADER, gl.MEDIUM_FLOAT ).precision > 0 ) { return 'mediump'; } } return 'lowp'; } let precision = parameters.precision !== undefined ? parameters.precision : 'highp'; const maxPrecision = getMaxPrecision( precision ); if ( maxPrecision !== precision ) { console.warn( 'THREE.WebGLRenderer:', precision, 'not supported, using', maxPrecision, 'instead.' 
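// Usage sketch (illustrative; assumes an existing WebGLRenderer `renderer`). The capabilities
// object assembled below is exposed publicly as renderer.capabilities:
//
//   const caps = renderer.capabilities;
//   console.log( caps.maxTextureSize, caps.maxSamples, caps.getMaxAnisotropy() );
//   if ( caps.vertexTextures === false ) console.warn( 'vertex textures not available' );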
); precision = maxPrecision; } const logarithmicDepthBuffer = parameters.logarithmicDepthBuffer === true; const reverseDepthBuffer = parameters.reverseDepthBuffer === true && extensions.has( 'EXT_clip_control' ); const maxTextures = gl.getParameter( gl.MAX_TEXTURE_IMAGE_UNITS ); const maxVertexTextures = gl.getParameter( gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS ); const maxTextureSize = gl.getParameter( gl.MAX_TEXTURE_SIZE ); const maxCubemapSize = gl.getParameter( gl.MAX_CUBE_MAP_TEXTURE_SIZE ); const maxAttributes = gl.getParameter( gl.MAX_VERTEX_ATTRIBS ); const maxVertexUniforms = gl.getParameter( gl.MAX_VERTEX_UNIFORM_VECTORS ); const maxVaryings = gl.getParameter( gl.MAX_VARYING_VECTORS ); const maxFragmentUniforms = gl.getParameter( gl.MAX_FRAGMENT_UNIFORM_VECTORS ); const vertexTextures = maxVertexTextures > 0; const maxSamples = gl.getParameter( gl.MAX_SAMPLES ); return { isWebGL2: true, // keeping this for backwards compatibility getMaxAnisotropy: getMaxAnisotropy, getMaxPrecision: getMaxPrecision, textureFormatReadable: textureFormatReadable, textureTypeReadable: textureTypeReadable, precision: precision, logarithmicDepthBuffer: logarithmicDepthBuffer, reverseDepthBuffer: reverseDepthBuffer, maxTextures: maxTextures, maxVertexTextures: maxVertexTextures, maxTextureSize: maxTextureSize, maxCubemapSize: maxCubemapSize, maxAttributes: maxAttributes, maxVertexUniforms: maxVertexUniforms, maxVaryings: maxVaryings, maxFragmentUniforms: maxFragmentUniforms, vertexTextures: vertexTextures, maxSamples: maxSamples }; } function WebGLClipping( properties ) { const scope = this; let globalState = null, numGlobalPlanes = 0, localClippingEnabled = false, renderingShadows = false; const plane = new Plane(), viewNormalMatrix = new Matrix3(), uniform = { value: null, needsUpdate: false }; this.uniform = uniform; this.numPlanes = 0; this.numIntersection = 0; this.init = function ( planes, enableLocalClipping ) { const enabled = planes.length !== 0 || enableLocalClipping || // enable state of previous frame - the clipping code has to // run another frame in order to reset the state: numGlobalPlanes !== 0 || localClippingEnabled; localClippingEnabled = enableLocalClipping; numGlobalPlanes = planes.length; return enabled; }; this.beginShadows = function () { renderingShadows = true; projectPlanes( null ); }; this.endShadows = function () { renderingShadows = false; }; this.setGlobalState = function ( planes, camera ) { globalState = projectPlanes( planes, camera, 0 ); }; this.setState = function ( material, camera, useCache ) { const planes = material.clippingPlanes, clipIntersection = material.clipIntersection, clipShadows = material.clipShadows; const materialProperties = properties.get( material ); if ( ! localClippingEnabled || planes === null || planes.length === 0 || renderingShadows && ! clipShadows ) { // there's no local clipping if ( renderingShadows ) { // there's no global clipping projectPlanes( null ); } else { resetGlobalState(); } } else { const nGlobal = renderingShadows ? 0 : numGlobalPlanes, lGlobal = nGlobal * 4; let dstArray = materialProperties.clippingState || null; uniform.value = dstArray; // ensure unique state dstArray = projectPlanes( planes, camera, lGlobal, useCache ); for ( let i = 0; i !== lGlobal; ++ i ) { dstArray[ i ] = globalState[ i ]; } materialProperties.clippingState = dstArray; this.numIntersection = clipIntersection ? 
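// Usage sketch (illustrative; assumes an existing `renderer` and `material`). The planes handled
// by WebGLClipping come from the public clipping API:
//
//   renderer.localClippingEnabled = true;                                   // enable per-material planes
//   renderer.clippingPlanes = [ new Plane( new Vector3( 0, - 1, 0 ), 1 ) ]; // global planes
//   material.clippingPlanes = [ new Plane( new Vector3( 1, 0, 0 ), 0 ) ];   // local planes
//   material.clipShadows = true;                                            // also clip this material's shadows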
this.numPlanes : 0; this.numPlanes += nGlobal; } }; function resetGlobalState() { if ( uniform.value !== globalState ) { uniform.value = globalState; uniform.needsUpdate = numGlobalPlanes > 0; } scope.numPlanes = numGlobalPlanes; scope.numIntersection = 0; } function projectPlanes( planes, camera, dstOffset, skipTransform ) { const nPlanes = planes !== null ? planes.length : 0; let dstArray = null; if ( nPlanes !== 0 ) { dstArray = uniform.value; if ( skipTransform !== true || dstArray === null ) { const flatSize = dstOffset + nPlanes * 4, viewMatrix = camera.matrixWorldInverse; viewNormalMatrix.getNormalMatrix( viewMatrix ); if ( dstArray === null || dstArray.length < flatSize ) { dstArray = new Float32Array( flatSize ); } for ( let i = 0, i4 = dstOffset; i !== nPlanes; ++ i, i4 += 4 ) { plane.copy( planes[ i ] ).applyMatrix4( viewMatrix, viewNormalMatrix ); plane.normal.toArray( dstArray, i4 ); dstArray[ i4 + 3 ] = plane.constant; } } uniform.value = dstArray; uniform.needsUpdate = true; } scope.numPlanes = nPlanes; scope.numIntersection = 0; return dstArray; } } function WebGLCubeMaps( renderer ) { let cubemaps = new WeakMap(); function mapTextureMapping( texture, mapping ) { if ( mapping === EquirectangularReflectionMapping ) { texture.mapping = CubeReflectionMapping; } else if ( mapping === EquirectangularRefractionMapping ) { texture.mapping = CubeRefractionMapping; } return texture; } function get( texture ) { if ( texture && texture.isTexture ) { const mapping = texture.mapping; if ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ) { if ( cubemaps.has( texture ) ) { const cubemap = cubemaps.get( texture ).texture; return mapTextureMapping( cubemap, texture.mapping ); } else { const image = texture.image; if ( image && image.height > 0 ) { const renderTarget = new WebGLCubeRenderTarget( image.height ); renderTarget.fromEquirectangularTexture( renderer, texture ); cubemaps.set( texture, renderTarget ); texture.addEventListener( 'dispose', onTextureDispose ); return mapTextureMapping( renderTarget.texture, texture.mapping ); } else { // image not yet ready. try the conversion next frame return null; } } } } return texture; } function onTextureDispose( event ) { const texture = event.target; texture.removeEventListener( 'dispose', onTextureDispose ); const cubemap = cubemaps.get( texture ); if ( cubemap !== undefined ) { cubemaps.delete( texture ); cubemap.dispose(); } } function dispose() { cubemaps = new WeakMap(); } return { get: get, dispose: dispose }; } const LOD_MIN = 4; // The standard deviations (radians) associated with the extra mips. These are // chosen to approximate a Trowbridge-Reitz distribution function times the // geometric shadowing function. These sigma values squared must match the // variance #defines in cube_uv_reflection_fragment.glsl.js. const EXTRA_LOD_SIGMA = [ 0.125, 0.215, 0.35, 0.446, 0.526, 0.582 ]; // The maximum length of the blur for loop. Smaller sigmas will use fewer // samples and exit early, but not recompile the shader. const MAX_SAMPLES = 20; const _flatCamera = /*@__PURE__*/ new OrthographicCamera(); const _clearColor = /*@__PURE__*/ new Color(); let _oldTarget = null; let _oldActiveCubeFace = 0; let _oldActiveMipmapLevel = 0; let _oldXrEnabled = false; // Golden Ratio const PHI = ( 1 + Math.sqrt( 5 ) ) / 2; const INV_PHI = 1 / PHI; // Vertices of a dodecahedron (except the opposites, which represent the // same axis), used as axis directions evenly spread on a sphere. 
const _axisDirections = [ /*@__PURE__*/ new Vector3( - PHI, INV_PHI, 0 ), /*@__PURE__*/ new Vector3( PHI, INV_PHI, 0 ), /*@__PURE__*/ new Vector3( - INV_PHI, 0, PHI ), /*@__PURE__*/ new Vector3( INV_PHI, 0, PHI ), /*@__PURE__*/ new Vector3( 0, PHI, - INV_PHI ), /*@__PURE__*/ new Vector3( 0, PHI, INV_PHI ), /*@__PURE__*/ new Vector3( -1, 1, -1 ), /*@__PURE__*/ new Vector3( 1, 1, -1 ), /*@__PURE__*/ new Vector3( -1, 1, 1 ), /*@__PURE__*/ new Vector3( 1, 1, 1 ) ]; const _origin = /*@__PURE__*/ new Vector3(); /** * This class generates a Prefiltered, Mipmapped Radiance Environment Map * (PMREM) from a cubeMap environment texture. This allows different levels of * blur to be quickly accessed based on material roughness. It is packed into a * special CubeUV format that allows us to perform custom interpolation so that * we can support nonlinear formats such as RGBE. Unlike a traditional mipmap * chain, it only goes down to the LOD_MIN level (above), and then creates extra * even more filtered 'mips' at the same LOD_MIN resolution, associated with * higher roughness levels. In this way we maintain resolution to smoothly * interpolate diffuse lighting while limiting sampling computation. * * Paper: Fast, Accurate Image-Based Lighting: * {@link https://drive.google.com/file/d/15y8r_UpKlU9SvV4ILb0C3qCPecS8pvLz/view} */ class PMREMGenerator { /** * Constructs a new PMREM generator. * * @param {WebGLRenderer} renderer - The renderer. */ constructor( renderer ) { this._renderer = renderer; this._pingPongRenderTarget = null; this._lodMax = 0; this._cubeSize = 0; this._lodPlanes = []; this._sizeLods = []; this._sigmas = []; this._blurMaterial = null; this._cubemapMaterial = null; this._equirectMaterial = null; this._compileMaterial( this._blurMaterial ); } /** * Generates a PMREM from a supplied Scene, which can be faster than using an * image if networking bandwidth is low. Optional sigma specifies a blur radius * in radians to be applied to the scene before PMREM generation. Optional near * and far planes ensure the scene is rendered in its entirety. * * @param {Scene} scene - The scene to be captured. * @param {number} [sigma=0] - The blur radius in radians. * @param {number} [near=0.1] - The near plane distance. * @param {number} [far=100] - The far plane distance. * @param {Object} [options={}] - The configuration options. * @param {number} [options.size=256] - The texture size of the PMREM. * @param {Vector3} [options.position=origin] - The position of the internal cube camera that renders the scene. * @return {WebGLRenderTarget} The resulting PMREM. */ fromScene( scene, sigma = 0, near = 0.1, far = 100, options = {} ) { const { size = 256, position = _origin, } = options; _oldTarget = this._renderer.getRenderTarget(); _oldActiveCubeFace = this._renderer.getActiveCubeFace(); _oldActiveMipmapLevel = this._renderer.getActiveMipmapLevel(); _oldXrEnabled = this._renderer.xr.enabled; this._renderer.xr.enabled = false; this._setSize( size ); const cubeUVRenderTarget = this._allocateTargets(); cubeUVRenderTarget.depthBuffer = true; this._sceneToCubeUV( scene, near, far, cubeUVRenderTarget, position ); if ( sigma > 0 ) { this._blur( cubeUVRenderTarget, 0, 0, sigma ); } this._applyPMREM( cubeUVRenderTarget ); this._cleanup( cubeUVRenderTarget ); return cubeUVRenderTarget; } /** * Generates a PMREM from an equirectangular texture, which can be either LDR * or HDR. The ideal input image size is 1k (1024 x 512), * as this matches best with the 256 x 256 cubemap output.
* * @param {Texture} equirectangular - The equirectangular texture to be converted. * @param {?WebGLRenderTarget} [renderTarget=null] - The render target to use. * @return {WebGLRenderTarget} The resulting PMREM. */ fromEquirectangular( equirectangular, renderTarget = null ) { return this._fromTexture( equirectangular, renderTarget ); } /** * Generates a PMREM from a cubemap texture, which can be either LDR * or HDR. The ideal input cube size is 256 x 256, * as this matches best with the 256 x 256 cubemap output. * * @param {Texture} cubemap - The cubemap texture to be converted. * @param {?WebGLRenderTarget} [renderTarget=null] - The render target to use. * @return {WebGLRenderTarget} The resulting PMREM. */ fromCubemap( cubemap, renderTarget = null ) { return this._fromTexture( cubemap, renderTarget ); } /** * Pre-compiles the cubemap shader. You can get faster start-up by invoking this method during * your texture's network fetch for increased concurrency. */ compileCubemapShader() { if ( this._cubemapMaterial === null ) { this._cubemapMaterial = _getCubemapMaterial(); this._compileMaterial( this._cubemapMaterial ); } } /** * Pre-compiles the equirectangular shader. You can get faster start-up by invoking this method during * your texture's network fetch for increased concurrency. */ compileEquirectangularShader() { if ( this._equirectMaterial === null ) { this._equirectMaterial = _getEquirectMaterial(); this._compileMaterial( this._equirectMaterial ); } } /** * Disposes of the PMREMGenerator's internal memory. Note that PMREMGenerator is a static class, * so you should not need more than one PMREMGenerator object. If you do, calling dispose() on * one of them will cause any others to also become unusable. */ dispose() { this._dispose(); if ( this._cubemapMaterial !== null ) this._cubemapMaterial.dispose(); if ( this._equirectMaterial !== null ) this._equirectMaterial.dispose(); } // private interface _setSize( cubeSize ) { this._lodMax = Math.floor( Math.log2( cubeSize ) ); this._cubeSize = Math.pow( 2, this._lodMax ); } _dispose() { if ( this._blurMaterial !== null ) this._blurMaterial.dispose(); if ( this._pingPongRenderTarget !== null ) this._pingPongRenderTarget.dispose(); for ( let i = 0; i < this._lodPlanes.length; i ++ ) { this._lodPlanes[ i ].dispose(); } } _cleanup( outputTarget ) { this._renderer.setRenderTarget( _oldTarget, _oldActiveCubeFace, _oldActiveMipmapLevel ); this._renderer.xr.enabled = _oldXrEnabled; outputTarget.scissorTest = false; _setViewport( outputTarget, 0, 0, outputTarget.width, outputTarget.height ); } _fromTexture( texture, renderTarget ) { if ( texture.mapping === CubeReflectionMapping || texture.mapping === CubeRefractionMapping ) { this._setSize( texture.image.length === 0 ?
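// Usage sketch (illustrative, not part of the library source; assumes an existing `renderer`,
// `scene` and an equirectangular HDR `texture` loaded elsewhere):
//
//   const pmremGenerator = new PMREMGenerator( renderer );
//   pmremGenerator.compileEquirectangularShader();            // optional warm-up
//   const renderTarget = pmremGenerator.fromEquirectangular( texture );
//   scene.environment = renderTarget.texture;                 // prefiltered CubeUV map
//   texture.dispose();
//   pmremGenerator.dispose();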
16 : ( texture.image[ 0 ].width || texture.image[ 0 ].image.width ) ); } else { // Equirectangular this._setSize( texture.image.width / 4 ); } _oldTarget = this._renderer.getRenderTarget(); _oldActiveCubeFace = this._renderer.getActiveCubeFace(); _oldActiveMipmapLevel = this._renderer.getActiveMipmapLevel(); _oldXrEnabled = this._renderer.xr.enabled; this._renderer.xr.enabled = false; const cubeUVRenderTarget = renderTarget || this._allocateTargets(); this._textureToCubeUV( texture, cubeUVRenderTarget ); this._applyPMREM( cubeUVRenderTarget ); this._cleanup( cubeUVRenderTarget ); return cubeUVRenderTarget; } _allocateTargets() { const width = 3 * Math.max( this._cubeSize, 16 * 7 ); const height = 4 * this._cubeSize; const params = { magFilter: LinearFilter, minFilter: LinearFilter, generateMipmaps: false, type: HalfFloatType, format: RGBAFormat, colorSpace: LinearSRGBColorSpace, depthBuffer: false }; const cubeUVRenderTarget = _createRenderTarget( width, height, params ); if ( this._pingPongRenderTarget === null || this._pingPongRenderTarget.width !== width || this._pingPongRenderTarget.height !== height ) { if ( this._pingPongRenderTarget !== null ) { this._dispose(); } this._pingPongRenderTarget = _createRenderTarget( width, height, params ); const { _lodMax } = this; ( { sizeLods: this._sizeLods, lodPlanes: this._lodPlanes, sigmas: this._sigmas } = _createPlanes( _lodMax ) ); this._blurMaterial = _getBlurShader( _lodMax, width, height ); } return cubeUVRenderTarget; } _compileMaterial( material ) { const tmpMesh = new Mesh( this._lodPlanes[ 0 ], material ); this._renderer.compile( tmpMesh, _flatCamera ); } _sceneToCubeUV( scene, near, far, cubeUVRenderTarget, position ) { const fov = 90; const aspect = 1; const cubeCamera = new PerspectiveCamera( fov, aspect, near, far ); const upSign = [ 1, -1, 1, 1, 1, 1 ]; const forwardSign = [ 1, 1, 1, -1, -1, -1 ]; const renderer = this._renderer; const originalAutoClear = renderer.autoClear; const toneMapping = renderer.toneMapping; renderer.getClearColor( _clearColor ); renderer.toneMapping = NoToneMapping; renderer.autoClear = false; const backgroundMaterial = new MeshBasicMaterial( { name: 'PMREM.Background', side: BackSide, depthWrite: false, depthTest: false, } ); const backgroundBox = new Mesh( new BoxGeometry(), backgroundMaterial ); let useSolidColor = false; const background = scene.background; if ( background ) { if ( background.isColor ) { backgroundMaterial.color.copy( background ); scene.background = null; useSolidColor = true; } } else { backgroundMaterial.color.copy( _clearColor ); useSolidColor = true; } for ( let i = 0; i < 6; i ++ ) { const col = i % 3; if ( col === 0 ) { cubeCamera.up.set( 0, upSign[ i ], 0 ); cubeCamera.position.set( position.x, position.y, position.z ); cubeCamera.lookAt( position.x + forwardSign[ i ], position.y, position.z ); } else if ( col === 1 ) { cubeCamera.up.set( 0, 0, upSign[ i ] ); cubeCamera.position.set( position.x, position.y, position.z ); cubeCamera.lookAt( position.x, position.y + forwardSign[ i ], position.z ); } else { cubeCamera.up.set( 0, upSign[ i ], 0 ); cubeCamera.position.set( position.x, position.y, position.z ); cubeCamera.lookAt( position.x, position.y, position.z + forwardSign[ i ] ); } const size = this._cubeSize; _setViewport( cubeUVRenderTarget, col * size, i > 2 ? 
size : 0, size, size ); renderer.setRenderTarget( cubeUVRenderTarget ); if ( useSolidColor ) { renderer.render( backgroundBox, cubeCamera ); } renderer.render( scene, cubeCamera ); } backgroundBox.geometry.dispose(); backgroundBox.material.dispose(); renderer.toneMapping = toneMapping; renderer.autoClear = originalAutoClear; scene.background = background; } _textureToCubeUV( texture, cubeUVRenderTarget ) { const renderer = this._renderer; const isCubeTexture = ( texture.mapping === CubeReflectionMapping || texture.mapping === CubeRefractionMapping ); if ( isCubeTexture ) { if ( this._cubemapMaterial === null ) { this._cubemapMaterial = _getCubemapMaterial(); } this._cubemapMaterial.uniforms.flipEnvMap.value = ( texture.isRenderTargetTexture === false ) ? -1 : 1; } else { if ( this._equirectMaterial === null ) { this._equirectMaterial = _getEquirectMaterial(); } } const material = isCubeTexture ? this._cubemapMaterial : this._equirectMaterial; const mesh = new Mesh( this._lodPlanes[ 0 ], material ); const uniforms = material.uniforms; uniforms[ 'envMap' ].value = texture; const size = this._cubeSize; _setViewport( cubeUVRenderTarget, 0, 0, 3 * size, 2 * size ); renderer.setRenderTarget( cubeUVRenderTarget ); renderer.render( mesh, _flatCamera ); } _applyPMREM( cubeUVRenderTarget ) { const renderer = this._renderer; const autoClear = renderer.autoClear; renderer.autoClear = false; const n = this._lodPlanes.length; for ( let i = 1; i < n; i ++ ) { const sigma = Math.sqrt( this._sigmas[ i ] * this._sigmas[ i ] - this._sigmas[ i - 1 ] * this._sigmas[ i - 1 ] ); const poleAxis = _axisDirections[ ( n - i - 1 ) % _axisDirections.length ]; this._blur( cubeUVRenderTarget, i - 1, i, sigma, poleAxis ); } renderer.autoClear = autoClear; } /** * This is a two-pass Gaussian blur for a cubemap. Normally this is done * vertically and horizontally, but this breaks down on a cube. Here we apply * the blur latitudinally (around the poles), and then longitudinally (towards * the poles) to approximate the orthogonally-separable blur. It is least * accurate at the poles, but still does a decent job. * * @private * @param {WebGLRenderTarget} cubeUVRenderTarget * @param {number} lodIn * @param {number} lodOut * @param {number} sigma * @param {Vector3} [poleAxis] */ _blur( cubeUVRenderTarget, lodIn, lodOut, sigma, poleAxis ) { const pingPongRenderTarget = this._pingPongRenderTarget; this._halfBlur( cubeUVRenderTarget, pingPongRenderTarget, lodIn, lodOut, sigma, 'latitudinal', poleAxis ); this._halfBlur( pingPongRenderTarget, cubeUVRenderTarget, lodOut, lodOut, sigma, 'longitudinal', poleAxis ); } _halfBlur( targetIn, targetOut, lodIn, lodOut, sigmaRadians, direction, poleAxis ) { const renderer = this._renderer; const blurMaterial = this._blurMaterial; if ( direction !== 'latitudinal' && direction !== 'longitudinal' ) { console.error( 'blur direction must be either latitudinal or longitudinal!' ); } // Number of standard deviations at which to cut off the discrete approximation. const STANDARD_DEVIATIONS = 3; const blurMesh = new Mesh( this._lodPlanes[ lodOut ], blurMaterial ); const blurUniforms = blurMaterial.uniforms; const pixels = this._sizeLods[ lodIn ] - 1; const radiansPerPixel = isFinite( sigmaRadians ) ? Math.PI / ( 2 * pixels ) : 2 * Math.PI / ( 2 * MAX_SAMPLES - 1 ); const sigmaPixels = sigmaRadians / radiansPerPixel; const samples = isFinite( sigmaRadians ) ? 
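// Worked example (illustrative): at the top mip of a 256-px cube, pixels = sizeLods[ 0 ] - 1 = 255,
// so radiansPerPixel = PI / 510 ≈ 0.00616. A blur of sigmaRadians = 0.05 gives sigmaPixels ≈ 8.1
// and samples = 1 + floor( 3 * 8.1 ) = 25, which exceeds MAX_SAMPLES = 20 and triggers the
// clipping warning below.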
1 + Math.floor( STANDARD_DEVIATIONS * sigmaPixels ) : MAX_SAMPLES; if ( samples > MAX_SAMPLES ) { console.warn( `sigmaRadians, ${ sigmaRadians}, is too large and will clip, as it requested ${ samples} samples when the maximum is set to ${MAX_SAMPLES}` ); } const weights = []; let sum = 0; for ( let i = 0; i < MAX_SAMPLES; ++ i ) { const x = i / sigmaPixels; const weight = Math.exp( - x * x / 2 ); weights.push( weight ); if ( i === 0 ) { sum += weight; } else if ( i < samples ) { sum += 2 * weight; } } for ( let i = 0; i < weights.length; i ++ ) { weights[ i ] = weights[ i ] / sum; } blurUniforms[ 'envMap' ].value = targetIn.texture; blurUniforms[ 'samples' ].value = samples; blurUniforms[ 'weights' ].value = weights; blurUniforms[ 'latitudinal' ].value = direction === 'latitudinal'; if ( poleAxis ) { blurUniforms[ 'poleAxis' ].value = poleAxis; } const { _lodMax } = this; blurUniforms[ 'dTheta' ].value = radiansPerPixel; blurUniforms[ 'mipInt' ].value = _lodMax - lodIn; const outputSize = this._sizeLods[ lodOut ]; const x = 3 * outputSize * ( lodOut > _lodMax - LOD_MIN ? lodOut - _lodMax + LOD_MIN : 0 ); const y = 4 * ( this._cubeSize - outputSize ); _setViewport( targetOut, x, y, 3 * outputSize, 2 * outputSize ); renderer.setRenderTarget( targetOut ); renderer.render( blurMesh, _flatCamera ); } } function _createPlanes( lodMax ) { const lodPlanes = []; const sizeLods = []; const sigmas = []; let lod = lodMax; const totalLods = lodMax - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length; for ( let i = 0; i < totalLods; i ++ ) { const sizeLod = Math.pow( 2, lod ); sizeLods.push( sizeLod ); let sigma = 1.0 / sizeLod; if ( i > lodMax - LOD_MIN ) { sigma = EXTRA_LOD_SIGMA[ i - lodMax + LOD_MIN - 1 ]; } else if ( i === 0 ) { sigma = 0; } sigmas.push( sigma ); const texelSize = 1.0 / ( sizeLod - 2 ); const min = - texelSize; const max = 1 + texelSize; const uv1 = [ min, min, max, min, max, max, min, min, max, max, min, max ]; const cubeFaces = 6; const vertices = 6; const positionSize = 3; const uvSize = 2; const faceIndexSize = 1; const position = new Float32Array( positionSize * vertices * cubeFaces ); const uv = new Float32Array( uvSize * vertices * cubeFaces ); const faceIndex = new Float32Array( faceIndexSize * vertices * cubeFaces ); for ( let face = 0; face < cubeFaces; face ++ ) { const x = ( face % 3 ) * 2 / 3 - 1; const y = face > 2 ? 
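// Worked example (illustrative): for a 256-px cube ( lodMax = 8 ) this loop builds
// totalLods = 8 - LOD_MIN + 1 + EXTRA_LOD_SIGMA.length = 11 plane sets with face sizes
// 256, 128, 64, 32, 16, followed by six more 16-px sets for the extra sigmas. They tile the
// atlas of width 3 * Math.max( 256, 112 ) = 768 and height 4 * 256 = 1024 allocated in
// _allocateTargets() above.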
0 : -1; const coordinates = [ x, y, 0, x + 2 / 3, y, 0, x + 2 / 3, y + 1, 0, x, y, 0, x + 2 / 3, y + 1, 0, x, y + 1, 0 ]; position.set( coordinates, positionSize * vertices * face ); uv.set( uv1, uvSize * vertices * face ); const fill = [ face, face, face, face, face, face ]; faceIndex.set( fill, faceIndexSize * vertices * face ); } const planes = new BufferGeometry(); planes.setAttribute( 'position', new BufferAttribute( position, positionSize ) ); planes.setAttribute( 'uv', new BufferAttribute( uv, uvSize ) ); planes.setAttribute( 'faceIndex', new BufferAttribute( faceIndex, faceIndexSize ) ); lodPlanes.push( planes ); if ( lod > LOD_MIN ) { lod --; } } return { lodPlanes, sizeLods, sigmas }; } function _createRenderTarget( width, height, params ) { const cubeUVRenderTarget = new WebGLRenderTarget( width, height, params ); cubeUVRenderTarget.texture.mapping = CubeUVReflectionMapping; cubeUVRenderTarget.texture.name = 'PMREM.cubeUv'; cubeUVRenderTarget.scissorTest = true; return cubeUVRenderTarget; } function _setViewport( target, x, y, width, height ) { target.viewport.set( x, y, width, height ); target.scissor.set( x, y, width, height ); } function _getBlurShader( lodMax, width, height ) { const weights = new Float32Array( MAX_SAMPLES ); const poleAxis = new Vector3( 0, 1, 0 ); const shaderMaterial = new ShaderMaterial( { name: 'SphericalGaussianBlur', defines: { 'n': MAX_SAMPLES, 'CUBEUV_TEXEL_WIDTH': 1.0 / width, 'CUBEUV_TEXEL_HEIGHT': 1.0 / height, 'CUBEUV_MAX_MIP': `${lodMax}.0`, }, uniforms: { 'envMap': { value: null }, 'samples': { value: 1 }, 'weights': { value: weights }, 'latitudinal': { value: false }, 'dTheta': { value: 0 }, 'mipInt': { value: 0 }, 'poleAxis': { value: poleAxis } }, vertexShader: _getCommonVertexShader(), fragmentShader: /* glsl */` precision mediump float; precision mediump int; varying vec3 vOutputDirection; uniform sampler2D envMap; uniform int samples; uniform float weights[ n ]; uniform bool latitudinal; uniform float dTheta; uniform float mipInt; uniform vec3 poleAxis; #define ENVMAP_TYPE_CUBE_UV #include <cube_uv_reflection_fragment> vec3 getSample( float theta, vec3 axis ) { float cosTheta = cos( theta ); // Rodrigues' axis-angle rotation vec3 sampleDirection = vOutputDirection * cosTheta + cross( axis, vOutputDirection ) * sin( theta ) + axis * dot( axis, vOutputDirection ) * ( 1.0 - cosTheta ); return bilinearCubeUV( envMap, sampleDirection, mipInt ); } void main() { vec3 axis = latitudinal ?
poleAxis : cross( poleAxis, vOutputDirection ); if ( all( equal( axis, vec3( 0.0 ) ) ) ) { axis = vec3( vOutputDirection.z, 0.0, - vOutputDirection.x ); } axis = normalize( axis ); gl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 ); gl_FragColor.rgb += weights[ 0 ] * getSample( 0.0, axis ); for ( int i = 1; i < n; i++ ) { if ( i >= samples ) { break; } float theta = dTheta * float( i ); gl_FragColor.rgb += weights[ i ] * getSample( -1.0 * theta, axis ); gl_FragColor.rgb += weights[ i ] * getSample( theta, axis ); } } `, blending: NoBlending, depthTest: false, depthWrite: false } ); return shaderMaterial; } function _getEquirectMaterial() { return new ShaderMaterial( { name: 'EquirectangularToCubeUV', uniforms: { 'envMap': { value: null } }, vertexShader: _getCommonVertexShader(), fragmentShader: /* glsl */` precision mediump float; precision mediump int; varying vec3 vOutputDirection; uniform sampler2D envMap; #include <common> void main() { vec3 outputDirection = normalize( vOutputDirection ); vec2 uv = equirectUv( outputDirection ); gl_FragColor = vec4( texture2D ( envMap, uv ).rgb, 1.0 ); } `, blending: NoBlending, depthTest: false, depthWrite: false } ); } function _getCubemapMaterial() { return new ShaderMaterial( { name: 'CubemapToCubeUV', uniforms: { 'envMap': { value: null }, 'flipEnvMap': { value: -1 } }, vertexShader: _getCommonVertexShader(), fragmentShader: /* glsl */` precision mediump float; precision mediump int; uniform float flipEnvMap; varying vec3 vOutputDirection; uniform samplerCube envMap; void main() { gl_FragColor = textureCube( envMap, vec3( flipEnvMap * vOutputDirection.x, vOutputDirection.yz ) ); } `, blending: NoBlending, depthTest: false, depthWrite: false } ); } function _getCommonVertexShader() { return /* glsl */` precision mediump float; precision mediump int; attribute float faceIndex; varying vec3 vOutputDirection; // RH coordinate system; PMREM face-indexing convention vec3 getDirection( vec2 uv, float face ) { uv = 2.0 * uv - 1.0; vec3 direction = vec3( uv, 1.0 ); if ( face == 0.0 ) { direction = direction.zyx; // ( 1, v, u ) pos x } else if ( face == 1.0 ) { direction = direction.xzy; direction.xz *= -1.0; // ( -u, 1, -v ) pos y } else if ( face == 2.0 ) { direction.x *= -1.0; // ( -u, v, 1 ) pos z } else if ( face == 3.0 ) { direction = direction.zyx; direction.xz *= -1.0; // ( -1, v, -u ) neg x } else if ( face == 4.0 ) { direction = direction.xzy; direction.xy *= -1.0; // ( -u, -1, v ) neg y } else if ( face == 5.0 ) { direction.z *= -1.0; // ( u, v, -1 ) neg z } return direction; } void main() { vOutputDirection = getDirection( uv, faceIndex ); gl_Position = vec4( position, 1.0 ); } `; } function WebGLCubeUVMaps( renderer ) { let cubeUVmaps = new WeakMap(); let pmremGenerator = null; function get( texture ) { if ( texture && texture.isTexture ) { const mapping = texture.mapping; const isEquirectMap = ( mapping === EquirectangularReflectionMapping || mapping === EquirectangularRefractionMapping ); const isCubeMap = ( mapping === CubeReflectionMapping || mapping === CubeRefractionMapping ); // equirect/cube map to cubeUV conversion if ( isEquirectMap || isCubeMap ) { let renderTarget = cubeUVmaps.get( texture ); const currentPMREMVersion = renderTarget !== undefined ? renderTarget.texture.pmremVersion : 0; if ( texture.isRenderTargetTexture && texture.pmremVersion !== currentPMREMVersion ) { if ( pmremGenerator === null ) pmremGenerator = new PMREMGenerator( renderer ); renderTarget = isEquirectMap ?
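// Usage sketch (illustrative; assumes an existing `scene` and a loaded `texture`). This cache is
// what makes environment maps work without an explicit PMREMGenerator on the application side:
//
//   texture.mapping = EquirectangularReflectionMapping;
//   scene.environment = texture; // or scene.background = texture with backgroundBlurriness > 0
//
// routes the texture through get(), which lazily creates a PMREMGenerator and substitutes the
// prefiltered CubeUV render-target texture.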
pmremGenerator.fromEquirectangular( texture, renderTarget ) : pmremGenerator.fromCubemap( texture, renderTarget ); renderTarget.texture.pmremVersion = texture.pmremVersion; cubeUVmaps.set( texture, renderTarget ); return renderTarget.texture; } else { if ( renderTarget !== undefined ) { return renderTarget.texture; } else { const image = texture.image; if ( ( isEquirectMap && image && image.height > 0 ) || ( isCubeMap && image && isCubeTextureComplete( image ) ) ) { if ( pmremGenerator === null ) pmremGenerator = new PMREMGenerator( renderer ); renderTarget = isEquirectMap ? pmremGenerator.fromEquirectangular( texture ) : pmremGenerator.fromCubemap( texture ); renderTarget.texture.pmremVersion = texture.pmremVersion; cubeUVmaps.set( texture, renderTarget ); texture.addEventListener( 'dispose', onTextureDispose ); return renderTarget.texture; } else { // image not yet ready. try the conversion next frame return null; } } } } } return texture; } function isCubeTextureComplete( image ) { let count = 0; const length = 6; for ( let i = 0; i < length; i ++ ) { if ( image[ i ] !== undefined ) count ++; } return count === length; } function onTextureDispose( event ) { const texture = event.target; texture.removeEventListener( 'dispose', onTextureDispose ); const cubemapUV = cubeUVmaps.get( texture ); if ( cubemapUV !== undefined ) { cubeUVmaps.delete( texture ); cubemapUV.dispose(); } } function dispose() { cubeUVmaps = new WeakMap(); if ( pmremGenerator !== null ) { pmremGenerator.dispose(); pmremGenerator = null; } } return { get: get, dispose: dispose }; } function WebGLExtensions( gl ) { const extensions = {}; function getExtension( name ) { if ( extensions[ name ] !== undefined ) { return extensions[ name ]; } let extension; switch ( name ) { case 'WEBGL_depth_texture': extension = gl.getExtension( 'WEBGL_depth_texture' ) || gl.getExtension( 'MOZ_WEBGL_depth_texture' ) || gl.getExtension( 'WEBKIT_WEBGL_depth_texture' ); break; case 'EXT_texture_filter_anisotropic': extension = gl.getExtension( 'EXT_texture_filter_anisotropic' ) || gl.getExtension( 'MOZ_EXT_texture_filter_anisotropic' ) || gl.getExtension( 'WEBKIT_EXT_texture_filter_anisotropic' ); break; case 'WEBGL_compressed_texture_s3tc': extension = gl.getExtension( 'WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'MOZ_WEBGL_compressed_texture_s3tc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_s3tc' ); break; case 'WEBGL_compressed_texture_pvrtc': extension = gl.getExtension( 'WEBGL_compressed_texture_pvrtc' ) || gl.getExtension( 'WEBKIT_WEBGL_compressed_texture_pvrtc' ); break; default: extension = gl.getExtension( name ); } extensions[ name ] = extension; return extension; } return { has: function ( name ) { return getExtension( name ) !== null; }, init: function () { getExtension( 'EXT_color_buffer_float' ); getExtension( 'WEBGL_clip_cull_distance' ); getExtension( 'OES_texture_float_linear' ); getExtension( 'EXT_color_buffer_half_float' ); getExtension( 'WEBGL_multisampled_render_to_texture' ); getExtension( 'WEBGL_render_shared_exponent' ); }, get: function ( name ) { const extension = getExtension( name ); if ( extension === null ) { warnOnce( 'THREE.WebGLRenderer: ' + name + ' extension not supported.' 
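// Usage sketch (illustrative; assumes an existing WebGLRenderer `renderer`). This helper is
// exposed publicly as renderer.extensions:
//
//   if ( renderer.extensions.has( 'EXT_texture_filter_anisotropic' ) ) {
//     const ext = renderer.extensions.get( 'EXT_texture_filter_anisotropic' );
//     console.log( renderer.getContext().getParameter( ext.MAX_TEXTURE_MAX_ANISOTROPY_EXT ) );
//   }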
); } return extension; } }; } function WebGLGeometries( gl, attributes, info, bindingStates ) { const geometries = {}; const wireframeAttributes = new WeakMap(); function onGeometryDispose( event ) { const geometry = event.target; if ( geometry.index !== null ) { attributes.remove( geometry.index ); } for ( const name in geometry.attributes ) { attributes.remove( geometry.attributes[ name ] ); } geometry.removeEventListener( 'dispose', onGeometryDispose ); delete geometries[ geometry.id ]; const attribute = wireframeAttributes.get( geometry ); if ( attribute ) { attributes.remove( attribute ); wireframeAttributes.delete( geometry ); } bindingStates.releaseStatesOfGeometry( geometry ); if ( geometry.isInstancedBufferGeometry === true ) { delete geometry._maxInstanceCount; } // info.memory.geometries --; } function get( object, geometry ) { if ( geometries[ geometry.id ] === true ) return geometry; geometry.addEventListener( 'dispose', onGeometryDispose ); geometries[ geometry.id ] = true; info.memory.geometries ++; return geometry; } function update( geometry ) { const geometryAttributes = geometry.attributes; // Updating index buffer in VAO now. See WebGLBindingStates. for ( const name in geometryAttributes ) { attributes.update( geometryAttributes[ name ], gl.ARRAY_BUFFER ); } } function updateWireframeAttribute( geometry ) { const indices = []; const geometryIndex = geometry.index; const geometryPosition = geometry.attributes.position; let version = 0; if ( geometryIndex !== null ) { const array = geometryIndex.array; version = geometryIndex.version; for ( let i = 0, l = array.length; i < l; i += 3 ) { const a = array[ i + 0 ]; const b = array[ i + 1 ]; const c = array[ i + 2 ]; indices.push( a, b, b, c, c, a ); } } else if ( geometryPosition !== undefined ) { const array = geometryPosition.array; version = geometryPosition.version; for ( let i = 0, l = ( array.length / 3 ) - 1; i < l; i += 3 ) { const a = i + 0; const b = i + 1; const c = i + 2; indices.push( a, b, b, c, c, a ); } } else { return; } const attribute = new ( arrayNeedsUint32( indices ) ? Uint32BufferAttribute : Uint16BufferAttribute )( indices, 1 ); attribute.version = version; // Updating index buffer in VAO now. 
See WebGLBindingStates // const previousAttribute = wireframeAttributes.get( geometry ); if ( previousAttribute ) attributes.remove( previousAttribute ); // wireframeAttributes.set( geometry, attribute ); } function getWireframeAttribute( geometry ) { const currentAttribute = wireframeAttributes.get( geometry ); if ( currentAttribute ) { const geometryIndex = geometry.index; if ( geometryIndex !== null ) { // if the attribute is obsolete, create a new one if ( currentAttribute.version < geometryIndex.version ) { updateWireframeAttribute( geometry ); } } } else { updateWireframeAttribute( geometry ); } return wireframeAttributes.get( geometry ); } return { get: get, update: update, getWireframeAttribute: getWireframeAttribute }; } function WebGLIndexedBufferRenderer( gl, extensions, info ) { let mode; function setMode( value ) { mode = value; } let type, bytesPerElement; function setIndex( value ) { type = value.type; bytesPerElement = value.bytesPerElement; } function render( start, count ) { gl.drawElements( mode, count, type, start * bytesPerElement ); info.update( count, mode, 1 ); } function renderInstances( start, count, primcount ) { if ( primcount === 0 ) return; gl.drawElementsInstanced( mode, count, type, start * bytesPerElement, primcount ); info.update( count, mode, primcount ); } function renderMultiDraw( starts, counts, drawCount ) { if ( drawCount === 0 ) return; const extension = extensions.get( 'WEBGL_multi_draw' ); extension.multiDrawElementsWEBGL( mode, counts, 0, type, starts, 0, drawCount ); let elementCount = 0; for ( let i = 0; i < drawCount; i ++ ) { elementCount += counts[ i ]; } info.update( elementCount, mode, 1 ); } function renderMultiDrawInstances( starts, counts, drawCount, primcount ) { if ( drawCount === 0 ) return; const extension = extensions.get( 'WEBGL_multi_draw' ); if ( extension === null ) { for ( let i = 0; i < starts.length; i ++ ) { renderInstances( starts[ i ] / bytesPerElement, counts[ i ], primcount[ i ] ); } } else { extension.multiDrawElementsInstancedWEBGL( mode, counts, 0, type, starts, 0, primcount, 0, drawCount ); let elementCount = 0; for ( let i = 0; i < drawCount; i ++ ) { elementCount += counts[ i ] * primcount[ i ]; } info.update( elementCount, mode, 1 ); } } // this.setMode = setMode; this.setIndex = setIndex; this.render = render; this.renderInstances = renderInstances; this.renderMultiDraw = renderMultiDraw; this.renderMultiDrawInstances = renderMultiDrawInstances; } function WebGLInfo( gl ) { const memory = { geometries: 0, textures: 0 }; const render = { frame: 0, calls: 0, triangles: 0, points: 0, lines: 0 }; function update( count, mode, instanceCount ) { render.calls ++; switch ( mode ) { case gl.TRIANGLES: render.triangles += instanceCount * ( count / 3 ); break; case gl.LINES: render.lines += instanceCount * ( count / 2 ); break; case gl.LINE_STRIP: render.lines += instanceCount * ( count - 1 ); break; case gl.LINE_LOOP: render.lines += instanceCount * count; break; case gl.POINTS: render.points += instanceCount * count; break; default: console.error( 'THREE.WebGLInfo: Unknown draw mode:', mode ); break; } } function reset() { render.calls = 0; render.triangles = 0; render.points = 0; render.lines = 0; } return { memory: memory, render: render, programs: null, autoReset: true, reset: reset, update: update }; } function WebGLMorphtargets( gl, capabilities, textures ) { const morphTextures = new WeakMap(); const morph = new Vector4(); function update( object, geometry, program ) { const objectInfluences = 
object.morphTargetInfluences; // the following encodes morph targets into an array of data textures. Each layer represents a single morph target. const morphAttribute = geometry.morphAttributes.position || geometry.morphAttributes.normal || geometry.morphAttributes.color; const morphTargetsCount = ( morphAttribute !== undefined ) ? morphAttribute.length : 0; let entry = morphTextures.get( geometry ); if ( entry === undefined || entry.count !== morphTargetsCount ) { if ( entry !== undefined ) entry.texture.dispose(); const hasMorphPosition = geometry.morphAttributes.position !== undefined; const hasMorphNormals = geometry.morphAttributes.normal !== undefined; const hasMorphColors = geometry.morphAttributes.color !== undefined; const morphTargets = geometry.morphAttributes.position || []; const morphNormals = geometry.morphAttributes.normal || []; const morphColors = geometry.morphAttributes.color || []; let vertexDataCount = 0; if ( hasMorphPosition === true ) vertexDataCount = 1; if ( hasMorphNormals === true ) vertexDataCount = 2; if ( hasMorphColors === true ) vertexDataCount = 3; let width = geometry.attributes.position.count * vertexDataCount; let height = 1; if ( width > capabilities.maxTextureSize ) { height = Math.ceil( width / capabilities.maxTextureSize ); width = capabilities.maxTextureSize; } const buffer = new Float32Array( width * height * 4 * morphTargetsCount ); const texture = new DataArrayTexture( buffer, width, height, morphTargetsCount ); texture.type = FloatType; texture.needsUpdate = true; // fill buffer const vertexDataStride = vertexDataCount * 4; for ( let i = 0; i < morphTargetsCount; i ++ ) { const morphTarget = morphTargets[ i ]; const morphNormal = morphNormals[ i ]; const morphColor = morphColors[ i ]; const offset = width * height * 4 * i; for ( let j = 0; j < morphTarget.count; j ++ ) { const stride = j * vertexDataStride; if ( hasMorphPosition === true ) { morph.fromBufferAttribute( morphTarget, j ); buffer[ offset + stride + 0 ] = morph.x; buffer[ offset + stride + 1 ] = morph.y; buffer[ offset + stride + 2 ] = morph.z; buffer[ offset + stride + 3 ] = 0; } if ( hasMorphNormals === true ) { morph.fromBufferAttribute( morphNormal, j ); buffer[ offset + stride + 4 ] = morph.x; buffer[ offset + stride + 5 ] = morph.y; buffer[ offset + stride + 6 ] = morph.z; buffer[ offset + stride + 7 ] = 0; } if ( hasMorphColors === true ) { morph.fromBufferAttribute( morphColor, j ); buffer[ offset + stride + 8 ] = morph.x; buffer[ offset + stride + 9 ] = morph.y; buffer[ offset + stride + 10 ] = morph.z; buffer[ offset + stride + 11 ] = ( morphColor.itemSize === 4 ) ? morph.w : 1; } } } entry = { count: morphTargetsCount, texture: texture, size: new Vector2( width, height ) }; morphTextures.set( geometry, entry ); function disposeTexture() { texture.dispose(); morphTextures.delete( geometry ); geometry.removeEventListener( 'dispose', disposeTexture ); } geometry.addEventListener( 'dispose', disposeTexture ); } // if ( object.isInstancedMesh === true && object.morphTexture !== null ) { program.getUniforms().setValue( gl, 'morphTexture', object.morphTexture, textures ); } else { let morphInfluencesSum = 0; for ( let i = 0; i < objectInfluences.length; i ++ ) { morphInfluencesSum += objectInfluences[ i ]; } const morphBaseInfluence = geometry.morphTargetsRelative ? 
1 : 1 - morphInfluencesSum; program.getUniforms().setValue( gl, 'morphTargetBaseInfluence', morphBaseInfluence ); program.getUniforms().setValue( gl, 'morphTargetInfluences', objectInfluences ); } program.getUniforms().setValue( gl, 'morphTargetsTexture', entry.texture, textures ); program.getUniforms().setValue( gl, 'morphTargetsTextureSize', entry.size ); } return { update: update }; } function WebGLObjects( gl, geometries, attributes, info ) { let updateMap = new WeakMap(); function update( object ) { const frame = info.render.frame; const geometry = object.geometry; const buffergeometry = geometries.get( object, geometry ); // Update once per frame if ( updateMap.get( buffergeometry ) !== frame ) { geometries.update( buffergeometry ); updateMap.set( buffergeometry, frame ); } if ( object.isInstancedMesh ) { if ( object.hasEventListener( 'dispose', onInstancedMeshDispose ) === false ) { object.addEventListener( 'dispose', onInstancedMeshDispose ); } if ( updateMap.get( object ) !== frame ) { attributes.update( object.instanceMatrix, gl.ARRAY_BUFFER ); if ( object.instanceColor !== null ) { attributes.update( object.instanceColor, gl.ARRAY_BUFFER ); } updateMap.set( object, frame ); } } if ( object.isSkinnedMesh ) { const skeleton = object.skeleton; if ( updateMap.get( skeleton ) !== frame ) { skeleton.update(); updateMap.set( skeleton, frame ); } } return buffergeometry; } function dispose() { updateMap = new WeakMap(); } function onInstancedMeshDispose( event ) { const instancedMesh = event.target; instancedMesh.removeEventListener( 'dispose', onInstancedMeshDispose ); attributes.remove( instancedMesh.instanceMatrix ); if ( instancedMesh.instanceColor !== null ) attributes.remove( instancedMesh.instanceColor ); } return { update: update, dispose: dispose }; } /** * Uniforms of a program. * Those form a tree structure with a special top-level container for the root, * which you get by calling 'new WebGLUniforms( gl, program )'. 
 *
 * Properties of inner nodes including the top-level container:
 *
 * .seq - array of nested uniforms
 * .map - nested uniforms by name
 *
 *
 * Methods of all nodes except the top-level container:
 *
 * .setValue( gl, value, [textures] )
 *
 * 		uploads a uniform value(s)
 * 		the 'textures' parameter is needed for sampler uniforms
 *
 *
 * Static methods of the top-level container (textures factorizations):
 *
 * .upload( gl, seq, values, textures )
 *
 * 		sets uniforms in 'seq' to 'values[id].value'
 *
 * .seqWithValue( seq, values ) : filteredSeq
 *
 * 		filters 'seq' entries with corresponding entry in values
 *
 *
 * Methods of the top-level container (textures factorizations):
 *
 * .setValue( gl, name, value, textures )
 *
 * 		sets uniform with name 'name' to 'value'
 *
 * .setOptional( gl, obj, prop )
 *
 * 		like .set for an optional property of the object
 *
 */

const emptyTexture = /*@__PURE__*/ new Texture();
const emptyShadowTexture = /*@__PURE__*/ new DepthTexture( 1, 1 );
const emptyArrayTexture = /*@__PURE__*/ new DataArrayTexture();
const empty3dTexture = /*@__PURE__*/ new Data3DTexture();
const emptyCubeTexture = /*@__PURE__*/ new CubeTexture();

// --- Utilities ---

// Array caches (provide temporary typed arrays, keyed by size)

const arrayCacheF32 = [];
const arrayCacheI32 = [];

// Float32Array caches used for uploading Matrix uniforms

const mat4array = new Float32Array( 16 );
const mat3array = new Float32Array( 9 );
const mat2array = new Float32Array( 4 );

// Flattening for arrays of vectors and matrices

function flatten( array, nBlocks, blockSize ) {

	const firstElem = array[ 0 ];

	if ( firstElem <= 0 || firstElem > 0 ) return array;
	// unoptimized: ! isNaN( firstElem )
	// see http://jacksondunstan.com/articles/983

	const n = nBlocks * blockSize;

	let r = arrayCacheF32[ n ];

	if ( r === undefined ) {

		r = new Float32Array( n );
		arrayCacheF32[ n ] = r;

	}

	if ( nBlocks !== 0 ) {

		firstElem.toArray( r, 0 );

		for ( let i = 1, offset = 0; i !== nBlocks; ++ i ) {

			offset += blockSize;
			array[ i ].toArray( r, offset );

		}

	}

	return r;

}

function arraysEqual( a, b ) {

	if ( a.length !== b.length ) return false;

	for ( let i = 0, l = a.length; i < l; i ++ ) {

		if ( a[ i ] !== b[ i ] ) return false;

	}

	return true;

}

function copyArray( a, b ) {

	for ( let i = 0, l = b.length; i < l; i ++ ) {

		a[ i ] = b[ i ];

	}

}

// Texture unit allocation

function allocTexUnits( textures, n ) {

	let r = arrayCacheI32[ n ];

	if ( r === undefined ) {

		r = new Int32Array( n );
		arrayCacheI32[ n ] = r;

	}

	for ( let i = 0; i !== n; ++ i ) {

		r[ i ] = textures.allocateTextureUnit();

	}

	return r;

}

// --- Setters ---

// Note: Defining these methods externally, because they come in a bunch
// and this way their names minify.
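
// --- Editor's illustrative sketch (not part of the library) ---
// A minimal usage sketch of the uniform container documented above, assuming
// `program` is a linked WebGL program handle, `textures` is the renderer's
// texture helper and `materialUniforms` is a plain `{ name: { value } }` map;
// all three names are placeholders and this function is never called from
// this file.
function _exampleWebGLUniformsUsage( gl, program, textures, materialUniforms ) {

	// parse the program's active uniforms into the .seq / .map tree
	const uniforms = new WebGLUniforms( gl, program );

	// set a single uniform by name (samplers additionally need `textures`)
	uniforms.setValue( gl, 'opacity', 0.5 );
	uniforms.setValue( gl, 'map', materialUniforms.map.value, textures );

	// upload only the entries present both in the program and in the values
	const seq = WebGLUniforms.seqWithValue( uniforms.seq, materialUniforms );
	WebGLUniforms.upload( gl, seq, materialUniforms, textures );

}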
// Single scalar function setValueV1f( gl, v ) { const cache = this.cache; if ( cache[ 0 ] === v ) return; gl.uniform1f( this.addr, v ); cache[ 0 ] = v; } // Single float vector (from flat array or THREE.VectorN) function setValueV2f( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y ) { gl.uniform2f( this.addr, v.x, v.y ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform2fv( this.addr, v ); copyArray( cache, v ); } } function setValueV3f( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z ) { gl.uniform3f( this.addr, v.x, v.y, v.z ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; } } else if ( v.r !== undefined ) { if ( cache[ 0 ] !== v.r || cache[ 1 ] !== v.g || cache[ 2 ] !== v.b ) { gl.uniform3f( this.addr, v.r, v.g, v.b ); cache[ 0 ] = v.r; cache[ 1 ] = v.g; cache[ 2 ] = v.b; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform3fv( this.addr, v ); copyArray( cache, v ); } } function setValueV4f( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z || cache[ 3 ] !== v.w ) { gl.uniform4f( this.addr, v.x, v.y, v.z, v.w ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; cache[ 3 ] = v.w; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform4fv( this.addr, v ); copyArray( cache, v ); } } // Single matrix (from flat array or THREE.MatrixN) function setValueM2( gl, v ) { const cache = this.cache; const elements = v.elements; if ( elements === undefined ) { if ( arraysEqual( cache, v ) ) return; gl.uniformMatrix2fv( this.addr, false, v ); copyArray( cache, v ); } else { if ( arraysEqual( cache, elements ) ) return; mat2array.set( elements ); gl.uniformMatrix2fv( this.addr, false, mat2array ); copyArray( cache, elements ); } } function setValueM3( gl, v ) { const cache = this.cache; const elements = v.elements; if ( elements === undefined ) { if ( arraysEqual( cache, v ) ) return; gl.uniformMatrix3fv( this.addr, false, v ); copyArray( cache, v ); } else { if ( arraysEqual( cache, elements ) ) return; mat3array.set( elements ); gl.uniformMatrix3fv( this.addr, false, mat3array ); copyArray( cache, elements ); } } function setValueM4( gl, v ) { const cache = this.cache; const elements = v.elements; if ( elements === undefined ) { if ( arraysEqual( cache, v ) ) return; gl.uniformMatrix4fv( this.addr, false, v ); copyArray( cache, v ); } else { if ( arraysEqual( cache, elements ) ) return; mat4array.set( elements ); gl.uniformMatrix4fv( this.addr, false, mat4array ); copyArray( cache, elements ); } } // Single integer / boolean function setValueV1i( gl, v ) { const cache = this.cache; if ( cache[ 0 ] === v ) return; gl.uniform1i( this.addr, v ); cache[ 0 ] = v; } // Single integer / boolean vector (from flat array or THREE.VectorN) function setValueV2i( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y ) { gl.uniform2i( this.addr, v.x, v.y ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform2iv( this.addr, v ); copyArray( cache, v ); } } function setValueV3i( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z ) { gl.uniform3i( this.addr, v.x, v.y, v.z ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; } } else { if ( 
arraysEqual( cache, v ) ) return; gl.uniform3iv( this.addr, v ); copyArray( cache, v ); } } function setValueV4i( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z || cache[ 3 ] !== v.w ) { gl.uniform4i( this.addr, v.x, v.y, v.z, v.w ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; cache[ 3 ] = v.w; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform4iv( this.addr, v ); copyArray( cache, v ); } } // Single unsigned integer function setValueV1ui( gl, v ) { const cache = this.cache; if ( cache[ 0 ] === v ) return; gl.uniform1ui( this.addr, v ); cache[ 0 ] = v; } // Single unsigned integer vector (from flat array or THREE.VectorN) function setValueV2ui( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y ) { gl.uniform2ui( this.addr, v.x, v.y ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform2uiv( this.addr, v ); copyArray( cache, v ); } } function setValueV3ui( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z ) { gl.uniform3ui( this.addr, v.x, v.y, v.z ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform3uiv( this.addr, v ); copyArray( cache, v ); } } function setValueV4ui( gl, v ) { const cache = this.cache; if ( v.x !== undefined ) { if ( cache[ 0 ] !== v.x || cache[ 1 ] !== v.y || cache[ 2 ] !== v.z || cache[ 3 ] !== v.w ) { gl.uniform4ui( this.addr, v.x, v.y, v.z, v.w ); cache[ 0 ] = v.x; cache[ 1 ] = v.y; cache[ 2 ] = v.z; cache[ 3 ] = v.w; } } else { if ( arraysEqual( cache, v ) ) return; gl.uniform4uiv( this.addr, v ); copyArray( cache, v ); } } // Single texture (2D / Cube) function setValueT1( gl, v, textures ) { const cache = this.cache; const unit = textures.allocateTextureUnit(); if ( cache[ 0 ] !== unit ) { gl.uniform1i( this.addr, unit ); cache[ 0 ] = unit; } let emptyTexture2D; if ( this.type === gl.SAMPLER_2D_SHADOW ) { emptyShadowTexture.compareFunction = LessEqualCompare; // #28670 emptyTexture2D = emptyShadowTexture; } else { emptyTexture2D = emptyTexture; } textures.setTexture2D( v || emptyTexture2D, unit ); } function setValueT3D1( gl, v, textures ) { const cache = this.cache; const unit = textures.allocateTextureUnit(); if ( cache[ 0 ] !== unit ) { gl.uniform1i( this.addr, unit ); cache[ 0 ] = unit; } textures.setTexture3D( v || empty3dTexture, unit ); } function setValueT6( gl, v, textures ) { const cache = this.cache; const unit = textures.allocateTextureUnit(); if ( cache[ 0 ] !== unit ) { gl.uniform1i( this.addr, unit ); cache[ 0 ] = unit; } textures.setTextureCube( v || emptyCubeTexture, unit ); } function setValueT2DArray1( gl, v, textures ) { const cache = this.cache; const unit = textures.allocateTextureUnit(); if ( cache[ 0 ] !== unit ) { gl.uniform1i( this.addr, unit ); cache[ 0 ] = unit; } textures.setTexture2DArray( v || emptyArrayTexture, unit ); } // Helper to pick the right setter for the singular case function getSingularSetter( type ) { switch ( type ) { case 0x1406: return setValueV1f; // FLOAT case 0x8b50: return setValueV2f; // _VEC2 case 0x8b51: return setValueV3f; // _VEC3 case 0x8b52: return setValueV4f; // _VEC4 case 0x8b5a: return setValueM2; // _MAT2 case 0x8b5b: return setValueM3; // _MAT3 case 0x8b5c: return setValueM4; // _MAT4 case 0x1404: case 0x8b56: return setValueV1i; // INT, BOOL case 
0x8b53: case 0x8b57: return setValueV2i; // _VEC2 case 0x8b54: case 0x8b58: return setValueV3i; // _VEC3 case 0x8b55: case 0x8b59: return setValueV4i; // _VEC4 case 0x1405: return setValueV1ui; // UINT case 0x8dc6: return setValueV2ui; // _VEC2 case 0x8dc7: return setValueV3ui; // _VEC3 case 0x8dc8: return setValueV4ui; // _VEC4 case 0x8b5e: // SAMPLER_2D case 0x8d66: // SAMPLER_EXTERNAL_OES case 0x8dca: // INT_SAMPLER_2D case 0x8dd2: // UNSIGNED_INT_SAMPLER_2D case 0x8b62: // SAMPLER_2D_SHADOW return setValueT1; case 0x8b5f: // SAMPLER_3D case 0x8dcb: // INT_SAMPLER_3D case 0x8dd3: // UNSIGNED_INT_SAMPLER_3D return setValueT3D1; case 0x8b60: // SAMPLER_CUBE case 0x8dcc: // INT_SAMPLER_CUBE case 0x8dd4: // UNSIGNED_INT_SAMPLER_CUBE case 0x8dc5: // SAMPLER_CUBE_SHADOW return setValueT6; case 0x8dc1: // SAMPLER_2D_ARRAY case 0x8dcf: // INT_SAMPLER_2D_ARRAY case 0x8dd7: // UNSIGNED_INT_SAMPLER_2D_ARRAY case 0x8dc4: // SAMPLER_2D_ARRAY_SHADOW return setValueT2DArray1; } } // Array of scalars function setValueV1fArray( gl, v ) { gl.uniform1fv( this.addr, v ); } // Array of vectors (from flat array or array of THREE.VectorN) function setValueV2fArray( gl, v ) { const data = flatten( v, this.size, 2 ); gl.uniform2fv( this.addr, data ); } function setValueV3fArray( gl, v ) { const data = flatten( v, this.size, 3 ); gl.uniform3fv( this.addr, data ); } function setValueV4fArray( gl, v ) { const data = flatten( v, this.size, 4 ); gl.uniform4fv( this.addr, data ); } // Array of matrices (from flat array or array of THREE.MatrixN) function setValueM2Array( gl, v ) { const data = flatten( v, this.size, 4 ); gl.uniformMatrix2fv( this.addr, false, data ); } function setValueM3Array( gl, v ) { const data = flatten( v, this.size, 9 ); gl.uniformMatrix3fv( this.addr, false, data ); } function setValueM4Array( gl, v ) { const data = flatten( v, this.size, 16 ); gl.uniformMatrix4fv( this.addr, false, data ); } // Array of integer / boolean function setValueV1iArray( gl, v ) { gl.uniform1iv( this.addr, v ); } // Array of integer / boolean vectors (from flat array) function setValueV2iArray( gl, v ) { gl.uniform2iv( this.addr, v ); } function setValueV3iArray( gl, v ) { gl.uniform3iv( this.addr, v ); } function setValueV4iArray( gl, v ) { gl.uniform4iv( this.addr, v ); } // Array of unsigned integer function setValueV1uiArray( gl, v ) { gl.uniform1uiv( this.addr, v ); } // Array of unsigned integer vectors (from flat array) function setValueV2uiArray( gl, v ) { gl.uniform2uiv( this.addr, v ); } function setValueV3uiArray( gl, v ) { gl.uniform3uiv( this.addr, v ); } function setValueV4uiArray( gl, v ) { gl.uniform4uiv( this.addr, v ); } // Array of textures (2D / 3D / Cube / 2DArray) function setValueT1Array( gl, v, textures ) { const cache = this.cache; const n = v.length; const units = allocTexUnits( textures, n ); if ( ! arraysEqual( cache, units ) ) { gl.uniform1iv( this.addr, units ); copyArray( cache, units ); } for ( let i = 0; i !== n; ++ i ) { textures.setTexture2D( v[ i ] || emptyTexture, units[ i ] ); } } function setValueT3DArray( gl, v, textures ) { const cache = this.cache; const n = v.length; const units = allocTexUnits( textures, n ); if ( ! arraysEqual( cache, units ) ) { gl.uniform1iv( this.addr, units ); copyArray( cache, units ); } for ( let i = 0; i !== n; ++ i ) { textures.setTexture3D( v[ i ] || empty3dTexture, units[ i ] ); } } function setValueT6Array( gl, v, textures ) { const cache = this.cache; const n = v.length; const units = allocTexUnits( textures, n ); if ( ! 
arraysEqual( cache, units ) ) { gl.uniform1iv( this.addr, units ); copyArray( cache, units ); } for ( let i = 0; i !== n; ++ i ) { textures.setTextureCube( v[ i ] || emptyCubeTexture, units[ i ] ); } } function setValueT2DArrayArray( gl, v, textures ) { const cache = this.cache; const n = v.length; const units = allocTexUnits( textures, n ); if ( ! arraysEqual( cache, units ) ) { gl.uniform1iv( this.addr, units ); copyArray( cache, units ); } for ( let i = 0; i !== n; ++ i ) { textures.setTexture2DArray( v[ i ] || emptyArrayTexture, units[ i ] ); } } // Helper to pick the right setter for a pure (bottom-level) array function getPureArraySetter( type ) { switch ( type ) { case 0x1406: return setValueV1fArray; // FLOAT case 0x8b50: return setValueV2fArray; // _VEC2 case 0x8b51: return setValueV3fArray; // _VEC3 case 0x8b52: return setValueV4fArray; // _VEC4 case 0x8b5a: return setValueM2Array; // _MAT2 case 0x8b5b: return setValueM3Array; // _MAT3 case 0x8b5c: return setValueM4Array; // _MAT4 case 0x1404: case 0x8b56: return setValueV1iArray; // INT, BOOL case 0x8b53: case 0x8b57: return setValueV2iArray; // _VEC2 case 0x8b54: case 0x8b58: return setValueV3iArray; // _VEC3 case 0x8b55: case 0x8b59: return setValueV4iArray; // _VEC4 case 0x1405: return setValueV1uiArray; // UINT case 0x8dc6: return setValueV2uiArray; // _VEC2 case 0x8dc7: return setValueV3uiArray; // _VEC3 case 0x8dc8: return setValueV4uiArray; // _VEC4 case 0x8b5e: // SAMPLER_2D case 0x8d66: // SAMPLER_EXTERNAL_OES case 0x8dca: // INT_SAMPLER_2D case 0x8dd2: // UNSIGNED_INT_SAMPLER_2D case 0x8b62: // SAMPLER_2D_SHADOW return setValueT1Array; case 0x8b5f: // SAMPLER_3D case 0x8dcb: // INT_SAMPLER_3D case 0x8dd3: // UNSIGNED_INT_SAMPLER_3D return setValueT3DArray; case 0x8b60: // SAMPLER_CUBE case 0x8dcc: // INT_SAMPLER_CUBE case 0x8dd4: // UNSIGNED_INT_SAMPLER_CUBE case 0x8dc5: // SAMPLER_CUBE_SHADOW return setValueT6Array; case 0x8dc1: // SAMPLER_2D_ARRAY case 0x8dcf: // INT_SAMPLER_2D_ARRAY case 0x8dd7: // UNSIGNED_INT_SAMPLER_2D_ARRAY case 0x8dc4: // SAMPLER_2D_ARRAY_SHADOW return setValueT2DArrayArray; } } // --- Uniform Classes --- class SingleUniform { constructor( id, activeInfo, addr ) { this.id = id; this.addr = addr; this.cache = []; this.type = activeInfo.type; this.setValue = getSingularSetter( activeInfo.type ); // this.path = activeInfo.name; // DEBUG } } class PureArrayUniform { constructor( id, activeInfo, addr ) { this.id = id; this.addr = addr; this.cache = []; this.type = activeInfo.type; this.size = activeInfo.size; this.setValue = getPureArraySetter( activeInfo.type ); // this.path = activeInfo.name; // DEBUG } } class StructuredUniform { constructor( id ) { this.id = id; this.seq = []; this.map = {}; } setValue( gl, value, textures ) { const seq = this.seq; for ( let i = 0, n = seq.length; i !== n; ++ i ) { const u = seq[ i ]; u.setValue( gl, value[ u.id ], textures ); } } } // --- Top-level --- // Parser - builds up the property tree from the path strings const RePathPart = /(\w+)(\])?(\[|\.)?/g; // extracts // - the identifier (member name or array index) // - followed by an optional right bracket (found when array index) // - followed by an optional left bracket or dot (type of subscript) // // Note: These portions can be read in a non-overlapping fashion and // allow straightforward parsing of the hierarchy that WebGL encodes // in the uniform names. 
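
// --- Editor's illustrative sketch (not part of the library) ---
// Shows what RePathPart extracts from the uniform names WebGL reports, and
// therefore how parseUniform() below builds the tree. The helper is purely
// explanatory and is never called from this file.
function _exampleUniformPathTokens( path ) {

	// _exampleUniformPathTokens( 'spotLights[0].position' ) ->
	// [ { id: 'spotLights', isIndex: false, subscript: '[' },
	//   { id: '0', isIndex: true, subscript: '.' },
	//   { id: 'position', isIndex: false, subscript: undefined } ]
	//
	// which parseUniform() turns into
	// StructuredUniform 'spotLights' -> StructuredUniform 0 -> SingleUniform 'position',
	// while a name like 'morphTargetInfluences[0]' becomes a single PureArrayUniform.

	const tokens = [];

	RePathPart.lastIndex = 0;

	let match;

	while ( ( match = RePathPart.exec( path ) ) !== null ) {

		tokens.push( { id: match[ 1 ], isIndex: match[ 2 ] === ']', subscript: match[ 3 ] } );

		// a missing subscript marks the last path segment
		if ( match[ 3 ] === undefined ) break;

	}

	return tokens;

}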
function addUniform( container, uniformObject ) { container.seq.push( uniformObject ); container.map[ uniformObject.id ] = uniformObject; } function parseUniform( activeInfo, addr, container ) { const path = activeInfo.name, pathLength = path.length; // reset RegExp object, because of the early exit of a previous run RePathPart.lastIndex = 0; while ( true ) { const match = RePathPart.exec( path ), matchEnd = RePathPart.lastIndex; let id = match[ 1 ]; const idIsIndex = match[ 2 ] === ']', subscript = match[ 3 ]; if ( idIsIndex ) id = id | 0; // convert to integer if ( subscript === undefined || subscript === '[' && matchEnd + 2 === pathLength ) { // bare name or "pure" bottom-level array "[0]" suffix addUniform( container, subscript === undefined ? new SingleUniform( id, activeInfo, addr ) : new PureArrayUniform( id, activeInfo, addr ) ); break; } else { // step into inner node / create it in case it doesn't exist const map = container.map; let next = map[ id ]; if ( next === undefined ) { next = new StructuredUniform( id ); addUniform( container, next ); } container = next; } } } // Root Container class WebGLUniforms { constructor( gl, program ) { this.seq = []; this.map = {}; const n = gl.getProgramParameter( program, gl.ACTIVE_UNIFORMS ); for ( let i = 0; i < n; ++ i ) { const info = gl.getActiveUniform( program, i ), addr = gl.getUniformLocation( program, info.name ); parseUniform( info, addr, this ); } } setValue( gl, name, value, textures ) { const u = this.map[ name ]; if ( u !== undefined ) u.setValue( gl, value, textures ); } setOptional( gl, object, name ) { const v = object[ name ]; if ( v !== undefined ) this.setValue( gl, name, v ); } static upload( gl, seq, values, textures ) { for ( let i = 0, n = seq.length; i !== n; ++ i ) { const u = seq[ i ], v = values[ u.id ]; if ( v.needsUpdate !== false ) { // note: always updating when .needsUpdate is undefined u.setValue( gl, v.value, textures ); } } } static seqWithValue( seq, values ) { const r = []; for ( let i = 0, n = seq.length; i !== n; ++ i ) { const u = seq[ i ]; if ( u.id in values ) r.push( u ); } return r; } } function WebGLShader( gl, type, string ) { const shader = gl.createShader( type ); gl.shaderSource( shader, string ); gl.compileShader( shader ); return shader; } // From https://www.khronos.org/registry/webgl/extensions/KHR_parallel_shader_compile/ const COMPLETION_STATUS_KHR = 0x91B1; let programIdCount = 0; function handleSource( string, errorLine ) { const lines = string.split( '\n' ); const lines2 = []; const from = Math.max( errorLine - 6, 0 ); const to = Math.min( errorLine + 6, lines.length ); for ( let i = from; i < to; i ++ ) { const line = i + 1; lines2.push( `${line === errorLine ? 
'>' : ' '} ${line}: ${lines[ i ]}` ); } return lines2.join( '\n' ); } const _m0 = /*@__PURE__*/ new Matrix3(); function getEncodingComponents( colorSpace ) { ColorManagement._getMatrix( _m0, ColorManagement.workingColorSpace, colorSpace ); const encodingMatrix = `mat3( ${ _m0.elements.map( ( v ) => v.toFixed( 4 ) ) } )`; switch ( ColorManagement.getTransfer( colorSpace ) ) { case LinearTransfer: return [ encodingMatrix, 'LinearTransferOETF' ]; case SRGBTransfer: return [ encodingMatrix, 'sRGBTransferOETF' ]; default: console.warn( 'THREE.WebGLProgram: Unsupported color space: ', colorSpace ); return [ encodingMatrix, 'LinearTransferOETF' ]; } } function getShaderErrors( gl, shader, type ) { const status = gl.getShaderParameter( shader, gl.COMPILE_STATUS ); const errors = gl.getShaderInfoLog( shader ).trim(); if ( status && errors === '' ) return ''; const errorMatches = /ERROR: 0:(\d+)/.exec( errors ); if ( errorMatches ) { // --enable-privileged-webgl-extension // console.log( '**' + type + '**', gl.getExtension( 'WEBGL_debug_shaders' ).getTranslatedShaderSource( shader ) ); const errorLine = parseInt( errorMatches[ 1 ] ); return type.toUpperCase() + '\n\n' + errors + '\n\n' + handleSource( gl.getShaderSource( shader ), errorLine ); } else { return errors; } } function getTexelEncodingFunction( functionName, colorSpace ) { const components = getEncodingComponents( colorSpace ); return [ `vec4 ${functionName}( vec4 value ) {`, ` return ${components[ 1 ]}( vec4( value.rgb * ${components[ 0 ]}, value.a ) );`, '}', ].join( '\n' ); } function getToneMappingFunction( functionName, toneMapping ) { let toneMappingName; switch ( toneMapping ) { case LinearToneMapping: toneMappingName = 'Linear'; break; case ReinhardToneMapping: toneMappingName = 'Reinhard'; break; case CineonToneMapping: toneMappingName = 'Cineon'; break; case ACESFilmicToneMapping: toneMappingName = 'ACESFilmic'; break; case AgXToneMapping: toneMappingName = 'AgX'; break; case NeutralToneMapping: toneMappingName = 'Neutral'; break; case CustomToneMapping: toneMappingName = 'Custom'; break; default: console.warn( 'THREE.WebGLProgram: Unsupported toneMapping:', toneMapping ); toneMappingName = 'Linear'; } return 'vec3 ' + functionName + '( vec3 color ) { return ' + toneMappingName + 'ToneMapping( color ); }'; } const _v0 = /*@__PURE__*/ new Vector3(); function getLuminanceFunction() { ColorManagement.getLuminanceCoefficients( _v0 ); const r = _v0.x.toFixed( 4 ); const g = _v0.y.toFixed( 4 ); const b = _v0.z.toFixed( 4 ); return [ 'float luminance( const in vec3 rgb ) {', ` const vec3 weights = vec3( ${ r }, ${ g }, ${ b } );`, ' return dot( weights, rgb );', '}' ].join( '\n' ); } function generateVertexExtensions( parameters ) { const chunks = [ parameters.extensionClipCullDistance ? '#extension GL_ANGLE_clip_cull_distance : require' : '', parameters.extensionMultiDraw ? 
'#extension GL_ANGLE_multi_draw : require' : '', ]; return chunks.filter( filterEmptyLine ).join( '\n' ); } function generateDefines( defines ) { const chunks = []; for ( const name in defines ) { const value = defines[ name ]; if ( value === false ) continue; chunks.push( '#define ' + name + ' ' + value ); } return chunks.join( '\n' ); } function fetchAttributeLocations( gl, program ) { const attributes = {}; const n = gl.getProgramParameter( program, gl.ACTIVE_ATTRIBUTES ); for ( let i = 0; i < n; i ++ ) { const info = gl.getActiveAttrib( program, i ); const name = info.name; let locationSize = 1; if ( info.type === gl.FLOAT_MAT2 ) locationSize = 2; if ( info.type === gl.FLOAT_MAT3 ) locationSize = 3; if ( info.type === gl.FLOAT_MAT4 ) locationSize = 4; // console.log( 'THREE.WebGLProgram: ACTIVE VERTEX ATTRIBUTE:', name, i ); attributes[ name ] = { type: info.type, location: gl.getAttribLocation( program, name ), locationSize: locationSize }; } return attributes; } function filterEmptyLine( string ) { return string !== ''; } function replaceLightNums( string, parameters ) { const numSpotLightCoords = parameters.numSpotLightShadows + parameters.numSpotLightMaps - parameters.numSpotLightShadowsWithMaps; return string .replace( /NUM_DIR_LIGHTS/g, parameters.numDirLights ) .replace( /NUM_SPOT_LIGHTS/g, parameters.numSpotLights ) .replace( /NUM_SPOT_LIGHT_MAPS/g, parameters.numSpotLightMaps ) .replace( /NUM_SPOT_LIGHT_COORDS/g, numSpotLightCoords ) .replace( /NUM_RECT_AREA_LIGHTS/g, parameters.numRectAreaLights ) .replace( /NUM_POINT_LIGHTS/g, parameters.numPointLights ) .replace( /NUM_HEMI_LIGHTS/g, parameters.numHemiLights ) .replace( /NUM_DIR_LIGHT_SHADOWS/g, parameters.numDirLightShadows ) .replace( /NUM_SPOT_LIGHT_SHADOWS_WITH_MAPS/g, parameters.numSpotLightShadowsWithMaps ) .replace( /NUM_SPOT_LIGHT_SHADOWS/g, parameters.numSpotLightShadows ) .replace( /NUM_POINT_LIGHT_SHADOWS/g, parameters.numPointLightShadows ); } function replaceClippingPlaneNums( string, parameters ) { return string .replace( /NUM_CLIPPING_PLANES/g, parameters.numClippingPlanes ) .replace( /UNION_CLIPPING_PLANES/g, ( parameters.numClippingPlanes - parameters.numClipIntersection ) ); } // Resolve Includes const includePattern = /^[ \t]*#include +<([\w\d./]+)>/gm; function resolveIncludes( string ) { return string.replace( includePattern, includeReplacer ); } const shaderChunkMap = new Map(); function includeReplacer( match, include ) { let string = ShaderChunk[ include ]; if ( string === undefined ) { const newInclude = shaderChunkMap.get( include ); if ( newInclude !== undefined ) { string = ShaderChunk[ newInclude ]; console.warn( 'THREE.WebGLRenderer: Shader chunk "%s" has been deprecated. 
Use "%s" instead.', include, newInclude ); } else { throw new Error( 'Can not resolve #include <' + include + '>' ); } } return resolveIncludes( string ); } // Unroll Loops const unrollLoopPattern = /#pragma unroll_loop_start\s+for\s*\(\s*int\s+i\s*=\s*(\d+)\s*;\s*i\s*<\s*(\d+)\s*;\s*i\s*\+\+\s*\)\s*{([\s\S]+?)}\s+#pragma unroll_loop_end/g; function unrollLoops( string ) { return string.replace( unrollLoopPattern, loopReplacer ); } function loopReplacer( match, start, end, snippet ) { let string = ''; for ( let i = parseInt( start ); i < parseInt( end ); i ++ ) { string += snippet .replace( /\[\s*i\s*\]/g, '[ ' + i + ' ]' ) .replace( /UNROLLED_LOOP_INDEX/g, i ); } return string; } // function generatePrecision( parameters ) { let precisionstring = `precision ${parameters.precision} float; precision ${parameters.precision} int; precision ${parameters.precision} sampler2D; precision ${parameters.precision} samplerCube; precision ${parameters.precision} sampler3D; precision ${parameters.precision} sampler2DArray; precision ${parameters.precision} sampler2DShadow; precision ${parameters.precision} samplerCubeShadow; precision ${parameters.precision} sampler2DArrayShadow; precision ${parameters.precision} isampler2D; precision ${parameters.precision} isampler3D; precision ${parameters.precision} isamplerCube; precision ${parameters.precision} isampler2DArray; precision ${parameters.precision} usampler2D; precision ${parameters.precision} usampler3D; precision ${parameters.precision} usamplerCube; precision ${parameters.precision} usampler2DArray; `; if ( parameters.precision === 'highp' ) { precisionstring += '\n#define HIGH_PRECISION'; } else if ( parameters.precision === 'mediump' ) { precisionstring += '\n#define MEDIUM_PRECISION'; } else if ( parameters.precision === 'lowp' ) { precisionstring += '\n#define LOW_PRECISION'; } return precisionstring; } function generateShadowMapTypeDefine( parameters ) { let shadowMapTypeDefine = 'SHADOWMAP_TYPE_BASIC'; if ( parameters.shadowMapType === PCFShadowMap ) { shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF'; } else if ( parameters.shadowMapType === PCFSoftShadowMap ) { shadowMapTypeDefine = 'SHADOWMAP_TYPE_PCF_SOFT'; } else if ( parameters.shadowMapType === VSMShadowMap ) { shadowMapTypeDefine = 'SHADOWMAP_TYPE_VSM'; } return shadowMapTypeDefine; } function generateEnvMapTypeDefine( parameters ) { let envMapTypeDefine = 'ENVMAP_TYPE_CUBE'; if ( parameters.envMap ) { switch ( parameters.envMapMode ) { case CubeReflectionMapping: case CubeRefractionMapping: envMapTypeDefine = 'ENVMAP_TYPE_CUBE'; break; case CubeUVReflectionMapping: envMapTypeDefine = 'ENVMAP_TYPE_CUBE_UV'; break; } } return envMapTypeDefine; } function generateEnvMapModeDefine( parameters ) { let envMapModeDefine = 'ENVMAP_MODE_REFLECTION'; if ( parameters.envMap ) { switch ( parameters.envMapMode ) { case CubeRefractionMapping: envMapModeDefine = 'ENVMAP_MODE_REFRACTION'; break; } } return envMapModeDefine; } function generateEnvMapBlendingDefine( parameters ) { let envMapBlendingDefine = 'ENVMAP_BLENDING_NONE'; if ( parameters.envMap ) { switch ( parameters.combine ) { case MultiplyOperation: envMapBlendingDefine = 'ENVMAP_BLENDING_MULTIPLY'; break; case MixOperation: envMapBlendingDefine = 'ENVMAP_BLENDING_MIX'; break; case AddOperation: envMapBlendingDefine = 'ENVMAP_BLENDING_ADD'; break; } } return envMapBlendingDefine; } function generateCubeUVSize( parameters ) { const imageHeight = parameters.envMapCubeUVHeight; if ( imageHeight === null ) return null; const maxMip = Math.log2( 
imageHeight ) - 2; const texelHeight = 1.0 / imageHeight; const texelWidth = 1.0 / ( 3 * Math.max( Math.pow( 2, maxMip ), 7 * 16 ) ); return { texelWidth, texelHeight, maxMip }; } function WebGLProgram( renderer, cacheKey, parameters, bindingStates ) { // TODO Send this event to Three.js DevTools // console.log( 'WebGLProgram', cacheKey ); const gl = renderer.getContext(); const defines = parameters.defines; let vertexShader = parameters.vertexShader; let fragmentShader = parameters.fragmentShader; const shadowMapTypeDefine = generateShadowMapTypeDefine( parameters ); const envMapTypeDefine = generateEnvMapTypeDefine( parameters ); const envMapModeDefine = generateEnvMapModeDefine( parameters ); const envMapBlendingDefine = generateEnvMapBlendingDefine( parameters ); const envMapCubeUVSize = generateCubeUVSize( parameters ); const customVertexExtensions = generateVertexExtensions( parameters ); const customDefines = generateDefines( defines ); const program = gl.createProgram(); let prefixVertex, prefixFragment; let versionString = parameters.glslVersion ? '#version ' + parameters.glslVersion + '\n' : ''; if ( parameters.isRawShaderMaterial ) { prefixVertex = [ '#define SHADER_TYPE ' + parameters.shaderType, '#define SHADER_NAME ' + parameters.shaderName, customDefines ].filter( filterEmptyLine ).join( '\n' ); if ( prefixVertex.length > 0 ) { prefixVertex += '\n'; } prefixFragment = [ '#define SHADER_TYPE ' + parameters.shaderType, '#define SHADER_NAME ' + parameters.shaderName, customDefines ].filter( filterEmptyLine ).join( '\n' ); if ( prefixFragment.length > 0 ) { prefixFragment += '\n'; } } else { prefixVertex = [ generatePrecision( parameters ), '#define SHADER_TYPE ' + parameters.shaderType, '#define SHADER_NAME ' + parameters.shaderName, customDefines, parameters.extensionClipCullDistance ? '#define USE_CLIP_DISTANCE' : '', parameters.batching ? '#define USE_BATCHING' : '', parameters.batchingColor ? '#define USE_BATCHING_COLOR' : '', parameters.instancing ? '#define USE_INSTANCING' : '', parameters.instancingColor ? '#define USE_INSTANCING_COLOR' : '', parameters.instancingMorph ? '#define USE_INSTANCING_MORPH' : '', parameters.useFog && parameters.fog ? '#define USE_FOG' : '', parameters.useFog && parameters.fogExp2 ? '#define FOG_EXP2' : '', parameters.map ? '#define USE_MAP' : '', parameters.envMap ? '#define USE_ENVMAP' : '', parameters.envMap ? '#define ' + envMapModeDefine : '', parameters.lightMap ? '#define USE_LIGHTMAP' : '', parameters.aoMap ? '#define USE_AOMAP' : '', parameters.bumpMap ? '#define USE_BUMPMAP' : '', parameters.normalMap ? '#define USE_NORMALMAP' : '', parameters.normalMapObjectSpace ? '#define USE_NORMALMAP_OBJECTSPACE' : '', parameters.normalMapTangentSpace ? '#define USE_NORMALMAP_TANGENTSPACE' : '', parameters.displacementMap ? '#define USE_DISPLACEMENTMAP' : '', parameters.emissiveMap ? '#define USE_EMISSIVEMAP' : '', parameters.anisotropy ? '#define USE_ANISOTROPY' : '', parameters.anisotropyMap ? '#define USE_ANISOTROPYMAP' : '', parameters.clearcoatMap ? '#define USE_CLEARCOATMAP' : '', parameters.clearcoatRoughnessMap ? '#define USE_CLEARCOAT_ROUGHNESSMAP' : '', parameters.clearcoatNormalMap ? '#define USE_CLEARCOAT_NORMALMAP' : '', parameters.iridescenceMap ? '#define USE_IRIDESCENCEMAP' : '', parameters.iridescenceThicknessMap ? '#define USE_IRIDESCENCE_THICKNESSMAP' : '', parameters.specularMap ? '#define USE_SPECULARMAP' : '', parameters.specularColorMap ? '#define USE_SPECULAR_COLORMAP' : '', parameters.specularIntensityMap ? 
'#define USE_SPECULAR_INTENSITYMAP' : '', parameters.roughnessMap ? '#define USE_ROUGHNESSMAP' : '', parameters.metalnessMap ? '#define USE_METALNESSMAP' : '', parameters.alphaMap ? '#define USE_ALPHAMAP' : '', parameters.alphaHash ? '#define USE_ALPHAHASH' : '', parameters.transmission ? '#define USE_TRANSMISSION' : '', parameters.transmissionMap ? '#define USE_TRANSMISSIONMAP' : '', parameters.thicknessMap ? '#define USE_THICKNESSMAP' : '', parameters.sheenColorMap ? '#define USE_SHEEN_COLORMAP' : '', parameters.sheenRoughnessMap ? '#define USE_SHEEN_ROUGHNESSMAP' : '', // parameters.mapUv ? '#define MAP_UV ' + parameters.mapUv : '', parameters.alphaMapUv ? '#define ALPHAMAP_UV ' + parameters.alphaMapUv : '', parameters.lightMapUv ? '#define LIGHTMAP_UV ' + parameters.lightMapUv : '', parameters.aoMapUv ? '#define AOMAP_UV ' + parameters.aoMapUv : '', parameters.emissiveMapUv ? '#define EMISSIVEMAP_UV ' + parameters.emissiveMapUv : '', parameters.bumpMapUv ? '#define BUMPMAP_UV ' + parameters.bumpMapUv : '', parameters.normalMapUv ? '#define NORMALMAP_UV ' + parameters.normalMapUv : '', parameters.displacementMapUv ? '#define DISPLACEMENTMAP_UV ' + parameters.displacementMapUv : '', parameters.metalnessMapUv ? '#define METALNESSMAP_UV ' + parameters.metalnessMapUv : '', parameters.roughnessMapUv ? '#define ROUGHNESSMAP_UV ' + parameters.roughnessMapUv : '', parameters.anisotropyMapUv ? '#define ANISOTROPYMAP_UV ' + parameters.anisotropyMapUv : '', parameters.clearcoatMapUv ? '#define CLEARCOATMAP_UV ' + parameters.clearcoatMapUv : '', parameters.clearcoatNormalMapUv ? '#define CLEARCOAT_NORMALMAP_UV ' + parameters.clearcoatNormalMapUv : '', parameters.clearcoatRoughnessMapUv ? '#define CLEARCOAT_ROUGHNESSMAP_UV ' + parameters.clearcoatRoughnessMapUv : '', parameters.iridescenceMapUv ? '#define IRIDESCENCEMAP_UV ' + parameters.iridescenceMapUv : '', parameters.iridescenceThicknessMapUv ? '#define IRIDESCENCE_THICKNESSMAP_UV ' + parameters.iridescenceThicknessMapUv : '', parameters.sheenColorMapUv ? '#define SHEEN_COLORMAP_UV ' + parameters.sheenColorMapUv : '', parameters.sheenRoughnessMapUv ? '#define SHEEN_ROUGHNESSMAP_UV ' + parameters.sheenRoughnessMapUv : '', parameters.specularMapUv ? '#define SPECULARMAP_UV ' + parameters.specularMapUv : '', parameters.specularColorMapUv ? '#define SPECULAR_COLORMAP_UV ' + parameters.specularColorMapUv : '', parameters.specularIntensityMapUv ? '#define SPECULAR_INTENSITYMAP_UV ' + parameters.specularIntensityMapUv : '', parameters.transmissionMapUv ? '#define TRANSMISSIONMAP_UV ' + parameters.transmissionMapUv : '', parameters.thicknessMapUv ? '#define THICKNESSMAP_UV ' + parameters.thicknessMapUv : '', // parameters.vertexTangents && parameters.flatShading === false ? '#define USE_TANGENT' : '', parameters.vertexColors ? '#define USE_COLOR' : '', parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '', parameters.vertexUv1s ? '#define USE_UV1' : '', parameters.vertexUv2s ? '#define USE_UV2' : '', parameters.vertexUv3s ? '#define USE_UV3' : '', parameters.pointsUvs ? '#define USE_POINTS_UV' : '', parameters.flatShading ? '#define FLAT_SHADED' : '', parameters.skinning ? '#define USE_SKINNING' : '', parameters.morphTargets ? '#define USE_MORPHTARGETS' : '', parameters.morphNormals && parameters.flatShading === false ? '#define USE_MORPHNORMALS' : '', ( parameters.morphColors ) ? '#define USE_MORPHCOLORS' : '', ( parameters.morphTargetsCount > 0 ) ? 
'#define MORPHTARGETS_TEXTURE_STRIDE ' + parameters.morphTextureStride : '', ( parameters.morphTargetsCount > 0 ) ? '#define MORPHTARGETS_COUNT ' + parameters.morphTargetsCount : '', parameters.doubleSided ? '#define DOUBLE_SIDED' : '', parameters.flipSided ? '#define FLIP_SIDED' : '', parameters.shadowMapEnabled ? '#define USE_SHADOWMAP' : '', parameters.shadowMapEnabled ? '#define ' + shadowMapTypeDefine : '', parameters.sizeAttenuation ? '#define USE_SIZEATTENUATION' : '', parameters.numLightProbes > 0 ? '#define USE_LIGHT_PROBES' : '', parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '', parameters.reverseDepthBuffer ? '#define USE_REVERSEDEPTHBUF' : '', 'uniform mat4 modelMatrix;', 'uniform mat4 modelViewMatrix;', 'uniform mat4 projectionMatrix;', 'uniform mat4 viewMatrix;', 'uniform mat3 normalMatrix;', 'uniform vec3 cameraPosition;', 'uniform bool isOrthographic;', '#ifdef USE_INSTANCING', ' attribute mat4 instanceMatrix;', '#endif', '#ifdef USE_INSTANCING_COLOR', ' attribute vec3 instanceColor;', '#endif', '#ifdef USE_INSTANCING_MORPH', ' uniform sampler2D morphTexture;', '#endif', 'attribute vec3 position;', 'attribute vec3 normal;', 'attribute vec2 uv;', '#ifdef USE_UV1', ' attribute vec2 uv1;', '#endif', '#ifdef USE_UV2', ' attribute vec2 uv2;', '#endif', '#ifdef USE_UV3', ' attribute vec2 uv3;', '#endif', '#ifdef USE_TANGENT', ' attribute vec4 tangent;', '#endif', '#if defined( USE_COLOR_ALPHA )', ' attribute vec4 color;', '#elif defined( USE_COLOR )', ' attribute vec3 color;', '#endif', '#ifdef USE_SKINNING', ' attribute vec4 skinIndex;', ' attribute vec4 skinWeight;', '#endif', '\n' ].filter( filterEmptyLine ).join( '\n' ); prefixFragment = [ generatePrecision( parameters ), '#define SHADER_TYPE ' + parameters.shaderType, '#define SHADER_NAME ' + parameters.shaderName, customDefines, parameters.useFog && parameters.fog ? '#define USE_FOG' : '', parameters.useFog && parameters.fogExp2 ? '#define FOG_EXP2' : '', parameters.alphaToCoverage ? '#define ALPHA_TO_COVERAGE' : '', parameters.map ? '#define USE_MAP' : '', parameters.matcap ? '#define USE_MATCAP' : '', parameters.envMap ? '#define USE_ENVMAP' : '', parameters.envMap ? '#define ' + envMapTypeDefine : '', parameters.envMap ? '#define ' + envMapModeDefine : '', parameters.envMap ? '#define ' + envMapBlendingDefine : '', envMapCubeUVSize ? '#define CUBEUV_TEXEL_WIDTH ' + envMapCubeUVSize.texelWidth : '', envMapCubeUVSize ? '#define CUBEUV_TEXEL_HEIGHT ' + envMapCubeUVSize.texelHeight : '', envMapCubeUVSize ? '#define CUBEUV_MAX_MIP ' + envMapCubeUVSize.maxMip + '.0' : '', parameters.lightMap ? '#define USE_LIGHTMAP' : '', parameters.aoMap ? '#define USE_AOMAP' : '', parameters.bumpMap ? '#define USE_BUMPMAP' : '', parameters.normalMap ? '#define USE_NORMALMAP' : '', parameters.normalMapObjectSpace ? '#define USE_NORMALMAP_OBJECTSPACE' : '', parameters.normalMapTangentSpace ? '#define USE_NORMALMAP_TANGENTSPACE' : '', parameters.emissiveMap ? '#define USE_EMISSIVEMAP' : '', parameters.anisotropy ? '#define USE_ANISOTROPY' : '', parameters.anisotropyMap ? '#define USE_ANISOTROPYMAP' : '', parameters.clearcoat ? '#define USE_CLEARCOAT' : '', parameters.clearcoatMap ? '#define USE_CLEARCOATMAP' : '', parameters.clearcoatRoughnessMap ? '#define USE_CLEARCOAT_ROUGHNESSMAP' : '', parameters.clearcoatNormalMap ? '#define USE_CLEARCOAT_NORMALMAP' : '', parameters.dispersion ? '#define USE_DISPERSION' : '', parameters.iridescence ? '#define USE_IRIDESCENCE' : '', parameters.iridescenceMap ? 
'#define USE_IRIDESCENCEMAP' : '', parameters.iridescenceThicknessMap ? '#define USE_IRIDESCENCE_THICKNESSMAP' : '', parameters.specularMap ? '#define USE_SPECULARMAP' : '', parameters.specularColorMap ? '#define USE_SPECULAR_COLORMAP' : '', parameters.specularIntensityMap ? '#define USE_SPECULAR_INTENSITYMAP' : '', parameters.roughnessMap ? '#define USE_ROUGHNESSMAP' : '', parameters.metalnessMap ? '#define USE_METALNESSMAP' : '', parameters.alphaMap ? '#define USE_ALPHAMAP' : '', parameters.alphaTest ? '#define USE_ALPHATEST' : '', parameters.alphaHash ? '#define USE_ALPHAHASH' : '', parameters.sheen ? '#define USE_SHEEN' : '', parameters.sheenColorMap ? '#define USE_SHEEN_COLORMAP' : '', parameters.sheenRoughnessMap ? '#define USE_SHEEN_ROUGHNESSMAP' : '', parameters.transmission ? '#define USE_TRANSMISSION' : '', parameters.transmissionMap ? '#define USE_TRANSMISSIONMAP' : '', parameters.thicknessMap ? '#define USE_THICKNESSMAP' : '', parameters.vertexTangents && parameters.flatShading === false ? '#define USE_TANGENT' : '', parameters.vertexColors || parameters.instancingColor || parameters.batchingColor ? '#define USE_COLOR' : '', parameters.vertexAlphas ? '#define USE_COLOR_ALPHA' : '', parameters.vertexUv1s ? '#define USE_UV1' : '', parameters.vertexUv2s ? '#define USE_UV2' : '', parameters.vertexUv3s ? '#define USE_UV3' : '', parameters.pointsUvs ? '#define USE_POINTS_UV' : '', parameters.gradientMap ? '#define USE_GRADIENTMAP' : '', parameters.flatShading ? '#define FLAT_SHADED' : '', parameters.doubleSided ? '#define DOUBLE_SIDED' : '', parameters.flipSided ? '#define FLIP_SIDED' : '', parameters.shadowMapEnabled ? '#define USE_SHADOWMAP' : '', parameters.shadowMapEnabled ? '#define ' + shadowMapTypeDefine : '', parameters.premultipliedAlpha ? '#define PREMULTIPLIED_ALPHA' : '', parameters.numLightProbes > 0 ? '#define USE_LIGHT_PROBES' : '', parameters.decodeVideoTexture ? '#define DECODE_VIDEO_TEXTURE' : '', parameters.decodeVideoTextureEmissive ? '#define DECODE_VIDEO_TEXTURE_EMISSIVE' : '', parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '', parameters.reverseDepthBuffer ? '#define USE_REVERSEDEPTHBUF' : '', 'uniform mat4 viewMatrix;', 'uniform vec3 cameraPosition;', 'uniform bool isOrthographic;', ( parameters.toneMapping !== NoToneMapping ) ? '#define TONE_MAPPING' : '', ( parameters.toneMapping !== NoToneMapping ) ? ShaderChunk[ 'tonemapping_pars_fragment' ] : '', // this code is required here because it is used by the toneMapping() function defined below ( parameters.toneMapping !== NoToneMapping ) ? getToneMappingFunction( 'toneMapping', parameters.toneMapping ) : '', parameters.dithering ? '#define DITHERING' : '', parameters.opaque ? '#define OPAQUE' : '', ShaderChunk[ 'colorspace_pars_fragment' ], // this code is required here because it is used by the various encoding/decoding function defined below getTexelEncodingFunction( 'linearToOutputTexel', parameters.outputColorSpace ), getLuminanceFunction(), parameters.useDepthPacking ? 
'#define DEPTH_PACKING ' + parameters.depthPacking : '', '\n' ].filter( filterEmptyLine ).join( '\n' ); } vertexShader = resolveIncludes( vertexShader ); vertexShader = replaceLightNums( vertexShader, parameters ); vertexShader = replaceClippingPlaneNums( vertexShader, parameters ); fragmentShader = resolveIncludes( fragmentShader ); fragmentShader = replaceLightNums( fragmentShader, parameters ); fragmentShader = replaceClippingPlaneNums( fragmentShader, parameters ); vertexShader = unrollLoops( vertexShader ); fragmentShader = unrollLoops( fragmentShader ); if ( parameters.isRawShaderMaterial !== true ) { // GLSL 3.0 conversion for built-in materials and ShaderMaterial versionString = '#version 300 es\n'; prefixVertex = [ customVertexExtensions, '#define attribute in', '#define varying out', '#define texture2D texture' ].join( '\n' ) + '\n' + prefixVertex; prefixFragment = [ '#define varying in', ( parameters.glslVersion === GLSL3 ) ? '' : 'layout(location = 0) out highp vec4 pc_fragColor;', ( parameters.glslVersion === GLSL3 ) ? '' : '#define gl_FragColor pc_fragColor', '#define gl_FragDepthEXT gl_FragDepth', '#define texture2D texture', '#define textureCube texture', '#define texture2DProj textureProj', '#define texture2DLodEXT textureLod', '#define texture2DProjLodEXT textureProjLod', '#define textureCubeLodEXT textureLod', '#define texture2DGradEXT textureGrad', '#define texture2DProjGradEXT textureProjGrad', '#define textureCubeGradEXT textureGrad' ].join( '\n' ) + '\n' + prefixFragment; } const vertexGlsl = versionString + prefixVertex + vertexShader; const fragmentGlsl = versionString + prefixFragment + fragmentShader; // console.log( '*VERTEX*', vertexGlsl ); // console.log( '*FRAGMENT*', fragmentGlsl ); const glVertexShader = WebGLShader( gl, gl.VERTEX_SHADER, vertexGlsl ); const glFragmentShader = WebGLShader( gl, gl.FRAGMENT_SHADER, fragmentGlsl ); gl.attachShader( program, glVertexShader ); gl.attachShader( program, glFragmentShader ); // Force a particular attribute to index 0. 
if ( parameters.index0AttributeName !== undefined ) { gl.bindAttribLocation( program, 0, parameters.index0AttributeName ); } else if ( parameters.morphTargets === true ) { // programs with morphTargets displace position out of attribute 0 gl.bindAttribLocation( program, 0, 'position' ); } gl.linkProgram( program ); function onFirstUse( self ) { // check for link errors if ( renderer.debug.checkShaderErrors ) { const programLog = gl.getProgramInfoLog( program ).trim(); const vertexLog = gl.getShaderInfoLog( glVertexShader ).trim(); const fragmentLog = gl.getShaderInfoLog( glFragmentShader ).trim(); let runnable = true; let haveDiagnostics = true; if ( gl.getProgramParameter( program, gl.LINK_STATUS ) === false ) { runnable = false; if ( typeof renderer.debug.onShaderError === 'function' ) { renderer.debug.onShaderError( gl, program, glVertexShader, glFragmentShader ); } else { // default error reporting const vertexErrors = getShaderErrors( gl, glVertexShader, 'vertex' ); const fragmentErrors = getShaderErrors( gl, glFragmentShader, 'fragment' ); console.error( 'THREE.WebGLProgram: Shader Error ' + gl.getError() + ' - ' + 'VALIDATE_STATUS ' + gl.getProgramParameter( program, gl.VALIDATE_STATUS ) + '\n\n' + 'Material Name: ' + self.name + '\n' + 'Material Type: ' + self.type + '\n\n' + 'Program Info Log: ' + programLog + '\n' + vertexErrors + '\n' + fragmentErrors ); } } else if ( programLog !== '' ) { console.warn( 'THREE.WebGLProgram: Program Info Log:', programLog ); } else if ( vertexLog === '' || fragmentLog === '' ) { haveDiagnostics = false; } if ( haveDiagnostics ) { self.diagnostics = { runnable: runnable, programLog: programLog, vertexShader: { log: vertexLog, prefix: prefixVertex }, fragmentShader: { log: fragmentLog, prefix: prefixFragment } }; } } // Clean up // Crashes in iOS9 and iOS10. #18402 // gl.detachShader( program, glVertexShader ); // gl.detachShader( program, glFragmentShader ); gl.deleteShader( glVertexShader ); gl.deleteShader( glFragmentShader ); cachedUniforms = new WebGLUniforms( gl, program ); cachedAttributes = fetchAttributeLocations( gl, program ); } // set up caching for uniform locations let cachedUniforms; this.getUniforms = function () { if ( cachedUniforms === undefined ) { // Populates cachedUniforms and cachedAttributes onFirstUse( this ); } return cachedUniforms; }; // set up caching for attribute locations let cachedAttributes; this.getAttributes = function () { if ( cachedAttributes === undefined ) { // Populates cachedAttributes and cachedUniforms onFirstUse( this ); } return cachedAttributes; }; // indicate when the program is ready to be used. if the KHR_parallel_shader_compile extension isn't supported, // flag the program as ready immediately. It may cause a stall when it's first used. 
let programReady = ( parameters.rendererExtensionParallelShaderCompile === false ); this.isReady = function () { if ( programReady === false ) { programReady = gl.getProgramParameter( program, COMPLETION_STATUS_KHR ); } return programReady; }; // free resource this.destroy = function () { bindingStates.releaseStatesOfProgram( this ); gl.deleteProgram( program ); this.program = undefined; }; // this.type = parameters.shaderType; this.name = parameters.shaderName; this.id = programIdCount ++; this.cacheKey = cacheKey; this.usedTimes = 1; this.program = program; this.vertexShader = glVertexShader; this.fragmentShader = glFragmentShader; return this; } let _id = 0; class WebGLShaderCache { constructor() { this.shaderCache = new Map(); this.materialCache = new Map(); } update( material ) { const vertexShader = material.vertexShader; const fragmentShader = material.fragmentShader; const vertexShaderStage = this._getShaderStage( vertexShader ); const fragmentShaderStage = this._getShaderStage( fragmentShader ); const materialShaders = this._getShaderCacheForMaterial( material ); if ( materialShaders.has( vertexShaderStage ) === false ) { materialShaders.add( vertexShaderStage ); vertexShaderStage.usedTimes ++; } if ( materialShaders.has( fragmentShaderStage ) === false ) { materialShaders.add( fragmentShaderStage ); fragmentShaderStage.usedTimes ++; } return this; } remove( material ) { const materialShaders = this.materialCache.get( material ); for ( const shaderStage of materialShaders ) { shaderStage.usedTimes --; if ( shaderStage.usedTimes === 0 ) this.shaderCache.delete( shaderStage.code ); } this.materialCache.delete( material ); return this; } getVertexShaderID( material ) { return this._getShaderStage( material.vertexShader ).id; } getFragmentShaderID( material ) { return this._getShaderStage( material.fragmentShader ).id; } dispose() { this.shaderCache.clear(); this.materialCache.clear(); } _getShaderCacheForMaterial( material ) { const cache = this.materialCache; let set = cache.get( material ); if ( set === undefined ) { set = new Set(); cache.set( material, set ); } return set; } _getShaderStage( code ) { const cache = this.shaderCache; let stage = cache.get( code ); if ( stage === undefined ) { stage = new WebGLShaderStage( code ); cache.set( code, stage ); } return stage; } } class WebGLShaderStage { constructor( code ) { this.id = _id ++; this.code = code; this.usedTimes = 0; } } function WebGLPrograms( renderer, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping ) { const _programLayers = new Layers(); const _customShaders = new WebGLShaderCache(); const _activeChannels = new Set(); const programs = []; const logarithmicDepthBuffer = capabilities.logarithmicDepthBuffer; const SUPPORTS_VERTEX_TEXTURES = capabilities.vertexTextures; let precision = capabilities.precision; const shaderIDs = { MeshDepthMaterial: 'depth', MeshDistanceMaterial: 'distanceRGBA', MeshNormalMaterial: 'normal', MeshBasicMaterial: 'basic', MeshLambertMaterial: 'lambert', MeshPhongMaterial: 'phong', MeshToonMaterial: 'toon', MeshStandardMaterial: 'physical', MeshPhysicalMaterial: 'physical', MeshMatcapMaterial: 'matcap', LineBasicMaterial: 'basic', LineDashedMaterial: 'dashed', PointsMaterial: 'points', ShadowMaterial: 'shadow', SpriteMaterial: 'sprite' }; function getChannel( value ) { _activeChannels.add( value ); if ( value === 0 ) return 'uv'; return `uv${ value }`; } function getParameters( material, lights, shadows, scene, object ) { const fog = scene.fog; const geometry = 
object.geometry; const environment = material.isMeshStandardMaterial ? scene.environment : null; const envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || environment ); const envMapCubeUVHeight = ( !! envMap ) && ( envMap.mapping === CubeUVReflectionMapping ) ? envMap.image.height : null; const shaderID = shaderIDs[ material.type ]; // heuristics to create shader parameters according to lights in the scene // (not to blow over maxLights budget) if ( material.precision !== null ) { precision = capabilities.getMaxPrecision( material.precision ); if ( precision !== material.precision ) { console.warn( 'THREE.WebGLProgram.getParameters:', material.precision, 'not supported, using', precision, 'instead.' ); } } // const morphAttribute = geometry.morphAttributes.position || geometry.morphAttributes.normal || geometry.morphAttributes.color; const morphTargetsCount = ( morphAttribute !== undefined ) ? morphAttribute.length : 0; let morphTextureStride = 0; if ( geometry.morphAttributes.position !== undefined ) morphTextureStride = 1; if ( geometry.morphAttributes.normal !== undefined ) morphTextureStride = 2; if ( geometry.morphAttributes.color !== undefined ) morphTextureStride = 3; // let vertexShader, fragmentShader; let customVertexShaderID, customFragmentShaderID; if ( shaderID ) { const shader = ShaderLib[ shaderID ]; vertexShader = shader.vertexShader; fragmentShader = shader.fragmentShader; } else { vertexShader = material.vertexShader; fragmentShader = material.fragmentShader; _customShaders.update( material ); customVertexShaderID = _customShaders.getVertexShaderID( material ); customFragmentShaderID = _customShaders.getFragmentShaderID( material ); } const currentRenderTarget = renderer.getRenderTarget(); const reverseDepthBuffer = renderer.state.buffers.depth.getReversed(); const IS_INSTANCEDMESH = object.isInstancedMesh === true; const IS_BATCHEDMESH = object.isBatchedMesh === true; const HAS_MAP = !! material.map; const HAS_MATCAP = !! material.matcap; const HAS_ENVMAP = !! envMap; const HAS_AOMAP = !! material.aoMap; const HAS_LIGHTMAP = !! material.lightMap; const HAS_BUMPMAP = !! material.bumpMap; const HAS_NORMALMAP = !! material.normalMap; const HAS_DISPLACEMENTMAP = !! material.displacementMap; const HAS_EMISSIVEMAP = !! material.emissiveMap; const HAS_METALNESSMAP = !! material.metalnessMap; const HAS_ROUGHNESSMAP = !! material.roughnessMap; const HAS_ANISOTROPY = material.anisotropy > 0; const HAS_CLEARCOAT = material.clearcoat > 0; const HAS_DISPERSION = material.dispersion > 0; const HAS_IRIDESCENCE = material.iridescence > 0; const HAS_SHEEN = material.sheen > 0; const HAS_TRANSMISSION = material.transmission > 0; const HAS_ANISOTROPYMAP = HAS_ANISOTROPY && !! material.anisotropyMap; const HAS_CLEARCOATMAP = HAS_CLEARCOAT && !! material.clearcoatMap; const HAS_CLEARCOAT_NORMALMAP = HAS_CLEARCOAT && !! material.clearcoatNormalMap; const HAS_CLEARCOAT_ROUGHNESSMAP = HAS_CLEARCOAT && !! material.clearcoatRoughnessMap; const HAS_IRIDESCENCEMAP = HAS_IRIDESCENCE && !! material.iridescenceMap; const HAS_IRIDESCENCE_THICKNESSMAP = HAS_IRIDESCENCE && !! material.iridescenceThicknessMap; const HAS_SHEEN_COLORMAP = HAS_SHEEN && !! material.sheenColorMap; const HAS_SHEEN_ROUGHNESSMAP = HAS_SHEEN && !! material.sheenRoughnessMap; const HAS_SPECULARMAP = !! material.specularMap; const HAS_SPECULAR_COLORMAP = !! material.specularColorMap; const HAS_SPECULAR_INTENSITYMAP = !! 
material.specularIntensityMap; const HAS_TRANSMISSIONMAP = HAS_TRANSMISSION && !! material.transmissionMap; const HAS_THICKNESSMAP = HAS_TRANSMISSION && !! material.thicknessMap; const HAS_GRADIENTMAP = !! material.gradientMap; const HAS_ALPHAMAP = !! material.alphaMap; const HAS_ALPHATEST = material.alphaTest > 0; const HAS_ALPHAHASH = !! material.alphaHash; const HAS_EXTENSIONS = !! material.extensions; let toneMapping = NoToneMapping; if ( material.toneMapped ) { if ( currentRenderTarget === null || currentRenderTarget.isXRRenderTarget === true ) { toneMapping = renderer.toneMapping; } } const parameters = { shaderID: shaderID, shaderType: material.type, shaderName: material.name, vertexShader: vertexShader, fragmentShader: fragmentShader, defines: material.defines, customVertexShaderID: customVertexShaderID, customFragmentShaderID: customFragmentShaderID, isRawShaderMaterial: material.isRawShaderMaterial === true, glslVersion: material.glslVersion, precision: precision, batching: IS_BATCHEDMESH, batchingColor: IS_BATCHEDMESH && object._colorsTexture !== null, instancing: IS_INSTANCEDMESH, instancingColor: IS_INSTANCEDMESH && object.instanceColor !== null, instancingMorph: IS_INSTANCEDMESH && object.morphTexture !== null, supportsVertexTextures: SUPPORTS_VERTEX_TEXTURES, outputColorSpace: ( currentRenderTarget === null ) ? renderer.outputColorSpace : ( currentRenderTarget.isXRRenderTarget === true ? currentRenderTarget.texture.colorSpace : LinearSRGBColorSpace ), alphaToCoverage: !! material.alphaToCoverage, map: HAS_MAP, matcap: HAS_MATCAP, envMap: HAS_ENVMAP, envMapMode: HAS_ENVMAP && envMap.mapping, envMapCubeUVHeight: envMapCubeUVHeight, aoMap: HAS_AOMAP, lightMap: HAS_LIGHTMAP, bumpMap: HAS_BUMPMAP, normalMap: HAS_NORMALMAP, displacementMap: SUPPORTS_VERTEX_TEXTURES && HAS_DISPLACEMENTMAP, emissiveMap: HAS_EMISSIVEMAP, normalMapObjectSpace: HAS_NORMALMAP && material.normalMapType === ObjectSpaceNormalMap, normalMapTangentSpace: HAS_NORMALMAP && material.normalMapType === TangentSpaceNormalMap, metalnessMap: HAS_METALNESSMAP, roughnessMap: HAS_ROUGHNESSMAP, anisotropy: HAS_ANISOTROPY, anisotropyMap: HAS_ANISOTROPYMAP, clearcoat: HAS_CLEARCOAT, clearcoatMap: HAS_CLEARCOATMAP, clearcoatNormalMap: HAS_CLEARCOAT_NORMALMAP, clearcoatRoughnessMap: HAS_CLEARCOAT_ROUGHNESSMAP, dispersion: HAS_DISPERSION, iridescence: HAS_IRIDESCENCE, iridescenceMap: HAS_IRIDESCENCEMAP, iridescenceThicknessMap: HAS_IRIDESCENCE_THICKNESSMAP, sheen: HAS_SHEEN, sheenColorMap: HAS_SHEEN_COLORMAP, sheenRoughnessMap: HAS_SHEEN_ROUGHNESSMAP, specularMap: HAS_SPECULARMAP, specularColorMap: HAS_SPECULAR_COLORMAP, specularIntensityMap: HAS_SPECULAR_INTENSITYMAP, transmission: HAS_TRANSMISSION, transmissionMap: HAS_TRANSMISSIONMAP, thicknessMap: HAS_THICKNESSMAP, gradientMap: HAS_GRADIENTMAP, opaque: material.transparent === false && material.blending === NormalBlending && material.alphaToCoverage === false, alphaMap: HAS_ALPHAMAP, alphaTest: HAS_ALPHATEST, alphaHash: HAS_ALPHAHASH, combine: material.combine, // mapUv: HAS_MAP && getChannel( material.map.channel ), aoMapUv: HAS_AOMAP && getChannel( material.aoMap.channel ), lightMapUv: HAS_LIGHTMAP && getChannel( material.lightMap.channel ), bumpMapUv: HAS_BUMPMAP && getChannel( material.bumpMap.channel ), normalMapUv: HAS_NORMALMAP && getChannel( material.normalMap.channel ), displacementMapUv: HAS_DISPLACEMENTMAP && getChannel( material.displacementMap.channel ), emissiveMapUv: HAS_EMISSIVEMAP && getChannel( material.emissiveMap.channel ), metalnessMapUv: 
HAS_METALNESSMAP && getChannel( material.metalnessMap.channel ), roughnessMapUv: HAS_ROUGHNESSMAP && getChannel( material.roughnessMap.channel ), anisotropyMapUv: HAS_ANISOTROPYMAP && getChannel( material.anisotropyMap.channel ), clearcoatMapUv: HAS_CLEARCOATMAP && getChannel( material.clearcoatMap.channel ), clearcoatNormalMapUv: HAS_CLEARCOAT_NORMALMAP && getChannel( material.clearcoatNormalMap.channel ), clearcoatRoughnessMapUv: HAS_CLEARCOAT_ROUGHNESSMAP && getChannel( material.clearcoatRoughnessMap.channel ), iridescenceMapUv: HAS_IRIDESCENCEMAP && getChannel( material.iridescenceMap.channel ), iridescenceThicknessMapUv: HAS_IRIDESCENCE_THICKNESSMAP && getChannel( material.iridescenceThicknessMap.channel ), sheenColorMapUv: HAS_SHEEN_COLORMAP && getChannel( material.sheenColorMap.channel ), sheenRoughnessMapUv: HAS_SHEEN_ROUGHNESSMAP && getChannel( material.sheenRoughnessMap.channel ), specularMapUv: HAS_SPECULARMAP && getChannel( material.specularMap.channel ), specularColorMapUv: HAS_SPECULAR_COLORMAP && getChannel( material.specularColorMap.channel ), specularIntensityMapUv: HAS_SPECULAR_INTENSITYMAP && getChannel( material.specularIntensityMap.channel ), transmissionMapUv: HAS_TRANSMISSIONMAP && getChannel( material.transmissionMap.channel ), thicknessMapUv: HAS_THICKNESSMAP && getChannel( material.thicknessMap.channel ), alphaMapUv: HAS_ALPHAMAP && getChannel( material.alphaMap.channel ), // vertexTangents: !! geometry.attributes.tangent && ( HAS_NORMALMAP || HAS_ANISOTROPY ), vertexColors: material.vertexColors, vertexAlphas: material.vertexColors === true && !! geometry.attributes.color && geometry.attributes.color.itemSize === 4, pointsUvs: object.isPoints === true && !! geometry.attributes.uv && ( HAS_MAP || HAS_ALPHAMAP ), fog: !! fog, useFog: material.fog === true, fogExp2: ( !! 
fog && fog.isFogExp2 ), flatShading: material.flatShading === true, sizeAttenuation: material.sizeAttenuation === true, logarithmicDepthBuffer: logarithmicDepthBuffer, reverseDepthBuffer: reverseDepthBuffer, skinning: object.isSkinnedMesh === true, morphTargets: geometry.morphAttributes.position !== undefined, morphNormals: geometry.morphAttributes.normal !== undefined, morphColors: geometry.morphAttributes.color !== undefined, morphTargetsCount: morphTargetsCount, morphTextureStride: morphTextureStride, numDirLights: lights.directional.length, numPointLights: lights.point.length, numSpotLights: lights.spot.length, numSpotLightMaps: lights.spotLightMap.length, numRectAreaLights: lights.rectArea.length, numHemiLights: lights.hemi.length, numDirLightShadows: lights.directionalShadowMap.length, numPointLightShadows: lights.pointShadowMap.length, numSpotLightShadows: lights.spotShadowMap.length, numSpotLightShadowsWithMaps: lights.numSpotLightShadowsWithMaps, numLightProbes: lights.numLightProbes, numClippingPlanes: clipping.numPlanes, numClipIntersection: clipping.numIntersection, dithering: material.dithering, shadowMapEnabled: renderer.shadowMap.enabled && shadows.length > 0, shadowMapType: renderer.shadowMap.type, toneMapping: toneMapping, decodeVideoTexture: HAS_MAP && ( material.map.isVideoTexture === true ) && ( ColorManagement.getTransfer( material.map.colorSpace ) === SRGBTransfer ), decodeVideoTextureEmissive: HAS_EMISSIVEMAP && ( material.emissiveMap.isVideoTexture === true ) && ( ColorManagement.getTransfer( material.emissiveMap.colorSpace ) === SRGBTransfer ), premultipliedAlpha: material.premultipliedAlpha, doubleSided: material.side === DoubleSide, flipSided: material.side === BackSide, useDepthPacking: material.depthPacking >= 0, depthPacking: material.depthPacking || 0, index0AttributeName: material.index0AttributeName, extensionClipCullDistance: HAS_EXTENSIONS && material.extensions.clipCullDistance === true && extensions.has( 'WEBGL_clip_cull_distance' ), extensionMultiDraw: ( HAS_EXTENSIONS && material.extensions.multiDraw === true || IS_BATCHEDMESH ) && extensions.has( 'WEBGL_multi_draw' ), rendererExtensionParallelShaderCompile: extensions.has( 'KHR_parallel_shader_compile' ), customProgramCacheKey: material.customProgramCacheKey() }; // the usage of getChannel() determines the active texture channels for this shader parameters.vertexUv1s = _activeChannels.has( 1 ); parameters.vertexUv2s = _activeChannels.has( 2 ); parameters.vertexUv3s = _activeChannels.has( 3 ); _activeChannels.clear(); return parameters; } function getProgramCacheKey( parameters ) { const array = []; if ( parameters.shaderID ) { array.push( parameters.shaderID ); } else { array.push( parameters.customVertexShaderID ); array.push( parameters.customFragmentShaderID ); } if ( parameters.defines !== undefined ) { for ( const name in parameters.defines ) { array.push( name ); array.push( parameters.defines[ name ] ); } } if ( parameters.isRawShaderMaterial === false ) { getProgramCacheKeyParameters( array, parameters ); getProgramCacheKeyBooleans( array, parameters ); array.push( renderer.outputColorSpace ); } array.push( parameters.customProgramCacheKey ); return array.join(); } function getProgramCacheKeyParameters( array, parameters ) { array.push( parameters.precision ); array.push( parameters.outputColorSpace ); array.push( parameters.envMapMode ); array.push( parameters.envMapCubeUVHeight ); array.push( parameters.mapUv ); array.push( parameters.alphaMapUv ); array.push( parameters.lightMapUv ); 
array.push( parameters.aoMapUv ); array.push( parameters.bumpMapUv ); array.push( parameters.normalMapUv ); array.push( parameters.displacementMapUv ); array.push( parameters.emissiveMapUv ); array.push( parameters.metalnessMapUv ); array.push( parameters.roughnessMapUv ); array.push( parameters.anisotropyMapUv ); array.push( parameters.clearcoatMapUv ); array.push( parameters.clearcoatNormalMapUv ); array.push( parameters.clearcoatRoughnessMapUv ); array.push( parameters.iridescenceMapUv ); array.push( parameters.iridescenceThicknessMapUv ); array.push( parameters.sheenColorMapUv ); array.push( parameters.sheenRoughnessMapUv ); array.push( parameters.specularMapUv ); array.push( parameters.specularColorMapUv ); array.push( parameters.specularIntensityMapUv ); array.push( parameters.transmissionMapUv ); array.push( parameters.thicknessMapUv ); array.push( parameters.combine ); array.push( parameters.fogExp2 ); array.push( parameters.sizeAttenuation ); array.push( parameters.morphTargetsCount ); array.push( parameters.morphAttributeCount ); array.push( parameters.numDirLights ); array.push( parameters.numPointLights ); array.push( parameters.numSpotLights ); array.push( parameters.numSpotLightMaps ); array.push( parameters.numHemiLights ); array.push( parameters.numRectAreaLights ); array.push( parameters.numDirLightShadows ); array.push( parameters.numPointLightShadows ); array.push( parameters.numSpotLightShadows ); array.push( parameters.numSpotLightShadowsWithMaps ); array.push( parameters.numLightProbes ); array.push( parameters.shadowMapType ); array.push( parameters.toneMapping ); array.push( parameters.numClippingPlanes ); array.push( parameters.numClipIntersection ); array.push( parameters.depthPacking ); } function getProgramCacheKeyBooleans( array, parameters ) { _programLayers.disableAll(); if ( parameters.supportsVertexTextures ) _programLayers.enable( 0 ); if ( parameters.instancing ) _programLayers.enable( 1 ); if ( parameters.instancingColor ) _programLayers.enable( 2 ); if ( parameters.instancingMorph ) _programLayers.enable( 3 ); if ( parameters.matcap ) _programLayers.enable( 4 ); if ( parameters.envMap ) _programLayers.enable( 5 ); if ( parameters.normalMapObjectSpace ) _programLayers.enable( 6 ); if ( parameters.normalMapTangentSpace ) _programLayers.enable( 7 ); if ( parameters.clearcoat ) _programLayers.enable( 8 ); if ( parameters.iridescence ) _programLayers.enable( 9 ); if ( parameters.alphaTest ) _programLayers.enable( 10 ); if ( parameters.vertexColors ) _programLayers.enable( 11 ); if ( parameters.vertexAlphas ) _programLayers.enable( 12 ); if ( parameters.vertexUv1s ) _programLayers.enable( 13 ); if ( parameters.vertexUv2s ) _programLayers.enable( 14 ); if ( parameters.vertexUv3s ) _programLayers.enable( 15 ); if ( parameters.vertexTangents ) _programLayers.enable( 16 ); if ( parameters.anisotropy ) _programLayers.enable( 17 ); if ( parameters.alphaHash ) _programLayers.enable( 18 ); if ( parameters.batching ) _programLayers.enable( 19 ); if ( parameters.dispersion ) _programLayers.enable( 20 ); if ( parameters.batchingColor ) _programLayers.enable( 21 ); array.push( _programLayers.mask ); _programLayers.disableAll(); if ( parameters.fog ) _programLayers.enable( 0 ); if ( parameters.useFog ) _programLayers.enable( 1 ); if ( parameters.flatShading ) _programLayers.enable( 2 ); if ( parameters.logarithmicDepthBuffer ) _programLayers.enable( 3 ); if ( parameters.reverseDepthBuffer ) _programLayers.enable( 4 ); if ( parameters.skinning ) _programLayers.enable( 5 ); 
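/*
 * The boolean parameters are packed into Layers bitmasks (up to 32 flags per mask) and pushed
 * onto the cache-key array, so any toggle that flips a bit produces a different key and a new
 * program. A hedged example of forcing that re-evaluation from user code (assuming an existing
 * `material`):
 *
 *   material.flatShading = true;  // changes the `flatShading` flag gathered in getParameters()
 *   material.needsUpdate = true;  // makes the renderer rebuild the parameters and cache key
 */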
if ( parameters.morphTargets ) _programLayers.enable( 6 ); if ( parameters.morphNormals ) _programLayers.enable( 7 ); if ( parameters.morphColors ) _programLayers.enable( 8 ); if ( parameters.premultipliedAlpha ) _programLayers.enable( 9 ); if ( parameters.shadowMapEnabled ) _programLayers.enable( 10 ); if ( parameters.doubleSided ) _programLayers.enable( 11 ); if ( parameters.flipSided ) _programLayers.enable( 12 ); if ( parameters.useDepthPacking ) _programLayers.enable( 13 ); if ( parameters.dithering ) _programLayers.enable( 14 ); if ( parameters.transmission ) _programLayers.enable( 15 ); if ( parameters.sheen ) _programLayers.enable( 16 ); if ( parameters.opaque ) _programLayers.enable( 17 ); if ( parameters.pointsUvs ) _programLayers.enable( 18 ); if ( parameters.decodeVideoTexture ) _programLayers.enable( 19 ); if ( parameters.decodeVideoTextureEmissive ) _programLayers.enable( 20 ); if ( parameters.alphaToCoverage ) _programLayers.enable( 21 ); array.push( _programLayers.mask ); } function getUniforms( material ) { const shaderID = shaderIDs[ material.type ]; let uniforms; if ( shaderID ) { const shader = ShaderLib[ shaderID ]; uniforms = UniformsUtils.clone( shader.uniforms ); } else { uniforms = material.uniforms; } return uniforms; } function acquireProgram( parameters, cacheKey ) { let program; // Check if code has been already compiled for ( let p = 0, pl = programs.length; p < pl; p ++ ) { const preexistingProgram = programs[ p ]; if ( preexistingProgram.cacheKey === cacheKey ) { program = preexistingProgram; ++ program.usedTimes; break; } } if ( program === undefined ) { program = new WebGLProgram( renderer, cacheKey, parameters, bindingStates ); programs.push( program ); } return program; } function releaseProgram( program ) { if ( -- program.usedTimes === 0 ) { // Remove from unordered set const i = programs.indexOf( program ); programs[ i ] = programs[ programs.length - 1 ]; programs.pop(); // Free WebGL resources program.destroy(); } } function releaseShaderCache( material ) { _customShaders.remove( material ); } function dispose() { _customShaders.dispose(); } return { getParameters: getParameters, getProgramCacheKey: getProgramCacheKey, getUniforms: getUniforms, acquireProgram: acquireProgram, releaseProgram: releaseProgram, releaseShaderCache: releaseShaderCache, // Exposed for resource monitoring & error feedback via renderer.info: programs: programs, dispose: dispose }; } function WebGLProperties() { let properties = new WeakMap(); function has( object ) { return properties.has( object ); } function get( object ) { let map = properties.get( object ); if ( map === undefined ) { map = {}; properties.set( object, map ); } return map; } function remove( object ) { properties.delete( object ); } function update( object, key, value ) { properties.get( object )[ key ] = value; } function dispose() { properties = new WeakMap(); } return { has: has, get: get, remove: remove, update: update, dispose: dispose }; } function painterSortStable( a, b ) { if ( a.groupOrder !== b.groupOrder ) { return a.groupOrder - b.groupOrder; } else if ( a.renderOrder !== b.renderOrder ) { return a.renderOrder - b.renderOrder; } else if ( a.material.id !== b.material.id ) { return a.material.id - b.material.id; } else if ( a.z !== b.z ) { return a.z - b.z; } else { return a.id - b.id; } } function reversePainterSortStable( a, b ) { if ( a.groupOrder !== b.groupOrder ) { return a.groupOrder - b.groupOrder; } else if ( a.renderOrder !== b.renderOrder ) { return a.renderOrder - b.renderOrder; } 
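/*
 * reversePainterSortStable falls through to a back-to-front depth comparison here; it is the
 * default for the transmissive and transparent lists, while opaque items use painterSortStable
 * above (grouped by material, then near-to-far). Both can be replaced per renderer; a minimal
 * sketch, assuming a WebGLRenderer instance named `renderer`:
 *
 *   renderer.setOpaqueSort( ( a, b ) => a.z - b.z );       // custom front-to-back
 *   renderer.setTransparentSort( ( a, b ) => b.z - a.z );  // custom back-to-front
 */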
else if ( a.z !== b.z ) { return b.z - a.z; } else { return a.id - b.id; } } function WebGLRenderList() { const renderItems = []; let renderItemsIndex = 0; const opaque = []; const transmissive = []; const transparent = []; function init() { renderItemsIndex = 0; opaque.length = 0; transmissive.length = 0; transparent.length = 0; } function getNextRenderItem( object, geometry, material, groupOrder, z, group ) { let renderItem = renderItems[ renderItemsIndex ]; if ( renderItem === undefined ) { renderItem = { id: object.id, object: object, geometry: geometry, material: material, groupOrder: groupOrder, renderOrder: object.renderOrder, z: z, group: group }; renderItems[ renderItemsIndex ] = renderItem; } else { renderItem.id = object.id; renderItem.object = object; renderItem.geometry = geometry; renderItem.material = material; renderItem.groupOrder = groupOrder; renderItem.renderOrder = object.renderOrder; renderItem.z = z; renderItem.group = group; } renderItemsIndex ++; return renderItem; } function push( object, geometry, material, groupOrder, z, group ) { const renderItem = getNextRenderItem( object, geometry, material, groupOrder, z, group ); if ( material.transmission > 0.0 ) { transmissive.push( renderItem ); } else if ( material.transparent === true ) { transparent.push( renderItem ); } else { opaque.push( renderItem ); } } function unshift( object, geometry, material, groupOrder, z, group ) { const renderItem = getNextRenderItem( object, geometry, material, groupOrder, z, group ); if ( material.transmission > 0.0 ) { transmissive.unshift( renderItem ); } else if ( material.transparent === true ) { transparent.unshift( renderItem ); } else { opaque.unshift( renderItem ); } } function sort( customOpaqueSort, customTransparentSort ) { if ( opaque.length > 1 ) opaque.sort( customOpaqueSort || painterSortStable ); if ( transmissive.length > 1 ) transmissive.sort( customTransparentSort || reversePainterSortStable ); if ( transparent.length > 1 ) transparent.sort( customTransparentSort || reversePainterSortStable ); } function finish() { // Clear references from inactive renderItems in the list for ( let i = renderItemsIndex, il = renderItems.length; i < il; i ++ ) { const renderItem = renderItems[ i ]; if ( renderItem.id === null ) break; renderItem.id = null; renderItem.object = null; renderItem.geometry = null; renderItem.material = null; renderItem.group = null; } } return { opaque: opaque, transmissive: transmissive, transparent: transparent, init: init, push: push, unshift: unshift, finish: finish, sort: sort }; } function WebGLRenderLists() { let lists = new WeakMap(); function get( scene, renderCallDepth ) { const listArray = lists.get( scene ); let list; if ( listArray === undefined ) { list = new WebGLRenderList(); lists.set( scene, [ list ] ); } else { if ( renderCallDepth >= listArray.length ) { list = new WebGLRenderList(); listArray.push( list ); } else { list = listArray[ renderCallDepth ]; } } return list; } function dispose() { lists = new WeakMap(); } return { get: get, dispose: dispose }; } function UniformsCache() { const lights = {}; return { get: function ( light ) { if ( lights[ light.id ] !== undefined ) { return lights[ light.id ]; } let uniforms; switch ( light.type ) { case 'DirectionalLight': uniforms = { direction: new Vector3(), color: new Color() }; break; case 'SpotLight': uniforms = { position: new Vector3(), direction: new Vector3(), color: new Color(), distance: 0, coneCos: 0, penumbraCos: 0, decay: 0 }; break; case 'PointLight': uniforms = { position: 
new Vector3(), color: new Color(), distance: 0, decay: 0 }; break; case 'HemisphereLight': uniforms = { direction: new Vector3(), skyColor: new Color(), groundColor: new Color() }; break; case 'RectAreaLight': uniforms = { color: new Color(), position: new Vector3(), halfWidth: new Vector3(), halfHeight: new Vector3() }; break; } lights[ light.id ] = uniforms; return uniforms; } }; } function ShadowUniformsCache() { const lights = {}; return { get: function ( light ) { if ( lights[ light.id ] !== undefined ) { return lights[ light.id ]; } let uniforms; switch ( light.type ) { case 'DirectionalLight': uniforms = { shadowIntensity: 1, shadowBias: 0, shadowNormalBias: 0, shadowRadius: 1, shadowMapSize: new Vector2() }; break; case 'SpotLight': uniforms = { shadowIntensity: 1, shadowBias: 0, shadowNormalBias: 0, shadowRadius: 1, shadowMapSize: new Vector2() }; break; case 'PointLight': uniforms = { shadowIntensity: 1, shadowBias: 0, shadowNormalBias: 0, shadowRadius: 1, shadowMapSize: new Vector2(), shadowCameraNear: 1, shadowCameraFar: 1000 }; break; // TODO (abelnation): set RectAreaLight shadow uniforms } lights[ light.id ] = uniforms; return uniforms; } }; } let nextVersion = 0; function shadowCastingAndTexturingLightsFirst( lightA, lightB ) { return ( lightB.castShadow ? 2 : 0 ) - ( lightA.castShadow ? 2 : 0 ) + ( lightB.map ? 1 : 0 ) - ( lightA.map ? 1 : 0 ); } function WebGLLights( extensions ) { const cache = new UniformsCache(); const shadowCache = ShadowUniformsCache(); const state = { version: 0, hash: { directionalLength: -1, pointLength: -1, spotLength: -1, rectAreaLength: -1, hemiLength: -1, numDirectionalShadows: -1, numPointShadows: -1, numSpotShadows: -1, numSpotMaps: -1, numLightProbes: -1 }, ambient: [ 0, 0, 0 ], probe: [], directional: [], directionalShadow: [], directionalShadowMap: [], directionalShadowMatrix: [], spot: [], spotLightMap: [], spotShadow: [], spotShadowMap: [], spotLightMatrix: [], rectArea: [], rectAreaLTC1: null, rectAreaLTC2: null, point: [], pointShadow: [], pointShadowMap: [], pointShadowMatrix: [], hemi: [], numSpotLightShadowsWithMaps: 0, numLightProbes: 0 }; for ( let i = 0; i < 9; i ++ ) state.probe.push( new Vector3() ); const vector3 = new Vector3(); const matrix4 = new Matrix4(); const matrix42 = new Matrix4(); function setup( lights ) { let r = 0, g = 0, b = 0; for ( let i = 0; i < 9; i ++ ) state.probe[ i ].set( 0, 0, 0 ); let directionalLength = 0; let pointLength = 0; let spotLength = 0; let rectAreaLength = 0; let hemiLength = 0; let numDirectionalShadows = 0; let numPointShadows = 0; let numSpotShadows = 0; let numSpotMaps = 0; let numSpotShadowsWithMaps = 0; let numLightProbes = 0; // ordering : [shadow casting + map texturing, map texturing, shadow casting, none ] lights.sort( shadowCastingAndTexturingLightsFirst ); for ( let i = 0, l = lights.length; i < l; i ++ ) { const light = lights[ i ]; const color = light.color; const intensity = light.intensity; const distance = light.distance; const shadowMap = ( light.shadow && light.shadow.map ) ? 
light.shadow.map.texture : null; if ( light.isAmbientLight ) { r += color.r * intensity; g += color.g * intensity; b += color.b * intensity; } else if ( light.isLightProbe ) { for ( let j = 0; j < 9; j ++ ) { state.probe[ j ].addScaledVector( light.sh.coefficients[ j ], intensity ); } numLightProbes ++; } else if ( light.isDirectionalLight ) { const uniforms = cache.get( light ); uniforms.color.copy( light.color ).multiplyScalar( light.intensity ); if ( light.castShadow ) { const shadow = light.shadow; const shadowUniforms = shadowCache.get( light ); shadowUniforms.shadowIntensity = shadow.intensity; shadowUniforms.shadowBias = shadow.bias; shadowUniforms.shadowNormalBias = shadow.normalBias; shadowUniforms.shadowRadius = shadow.radius; shadowUniforms.shadowMapSize = shadow.mapSize; state.directionalShadow[ directionalLength ] = shadowUniforms; state.directionalShadowMap[ directionalLength ] = shadowMap; state.directionalShadowMatrix[ directionalLength ] = light.shadow.matrix; numDirectionalShadows ++; } state.directional[ directionalLength ] = uniforms; directionalLength ++; } else if ( light.isSpotLight ) { const uniforms = cache.get( light ); uniforms.position.setFromMatrixPosition( light.matrixWorld ); uniforms.color.copy( color ).multiplyScalar( intensity ); uniforms.distance = distance; uniforms.coneCos = Math.cos( light.angle ); uniforms.penumbraCos = Math.cos( light.angle * ( 1 - light.penumbra ) ); uniforms.decay = light.decay; state.spot[ spotLength ] = uniforms; const shadow = light.shadow; if ( light.map ) { state.spotLightMap[ numSpotMaps ] = light.map; numSpotMaps ++; // make sure the lightMatrix is up to date // TODO : do it if required only shadow.updateMatrices( light ); if ( light.castShadow ) numSpotShadowsWithMaps ++; } state.spotLightMatrix[ spotLength ] = shadow.matrix; if ( light.castShadow ) { const shadowUniforms = shadowCache.get( light ); shadowUniforms.shadowIntensity = shadow.intensity; shadowUniforms.shadowBias = shadow.bias; shadowUniforms.shadowNormalBias = shadow.normalBias; shadowUniforms.shadowRadius = shadow.radius; shadowUniforms.shadowMapSize = shadow.mapSize; state.spotShadow[ spotLength ] = shadowUniforms; state.spotShadowMap[ spotLength ] = shadowMap; numSpotShadows ++; } spotLength ++; } else if ( light.isRectAreaLight ) { const uniforms = cache.get( light ); uniforms.color.copy( color ).multiplyScalar( intensity ); uniforms.halfWidth.set( light.width * 0.5, 0.0, 0.0 ); uniforms.halfHeight.set( 0.0, light.height * 0.5, 0.0 ); state.rectArea[ rectAreaLength ] = uniforms; rectAreaLength ++; } else if ( light.isPointLight ) { const uniforms = cache.get( light ); uniforms.color.copy( light.color ).multiplyScalar( light.intensity ); uniforms.distance = light.distance; uniforms.decay = light.decay; if ( light.castShadow ) { const shadow = light.shadow; const shadowUniforms = shadowCache.get( light ); shadowUniforms.shadowIntensity = shadow.intensity; shadowUniforms.shadowBias = shadow.bias; shadowUniforms.shadowNormalBias = shadow.normalBias; shadowUniforms.shadowRadius = shadow.radius; shadowUniforms.shadowMapSize = shadow.mapSize; shadowUniforms.shadowCameraNear = shadow.camera.near; shadowUniforms.shadowCameraFar = shadow.camera.far; state.pointShadow[ pointLength ] = shadowUniforms; state.pointShadowMap[ pointLength ] = shadowMap; state.pointShadowMatrix[ pointLength ] = light.shadow.matrix; numPointShadows ++; } state.point[ pointLength ] = uniforms; pointLength ++; } else if ( light.isHemisphereLight ) { const uniforms = cache.get( light ); 
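/*
 * `cache.get()` lazily creates one uniform struct per light id (see UniformsCache above) and
 * reuses it every frame; only the values are refreshed here. Illustrative scene setup that
 * reaches this branch (the `scene` name is an assumption):
 *
 *   scene.add( new HemisphereLight( 0xddeeff, 0x202020, 1.0 ) );
 */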
uniforms.skyColor.copy( light.color ).multiplyScalar( intensity ); uniforms.groundColor.copy( light.groundColor ).multiplyScalar( intensity ); state.hemi[ hemiLength ] = uniforms; hemiLength ++; } } if ( rectAreaLength > 0 ) { if ( extensions.has( 'OES_texture_float_linear' ) === true ) { state.rectAreaLTC1 = UniformsLib.LTC_FLOAT_1; state.rectAreaLTC2 = UniformsLib.LTC_FLOAT_2; } else { state.rectAreaLTC1 = UniformsLib.LTC_HALF_1; state.rectAreaLTC2 = UniformsLib.LTC_HALF_2; } } state.ambient[ 0 ] = r; state.ambient[ 1 ] = g; state.ambient[ 2 ] = b; const hash = state.hash; if ( hash.directionalLength !== directionalLength || hash.pointLength !== pointLength || hash.spotLength !== spotLength || hash.rectAreaLength !== rectAreaLength || hash.hemiLength !== hemiLength || hash.numDirectionalShadows !== numDirectionalShadows || hash.numPointShadows !== numPointShadows || hash.numSpotShadows !== numSpotShadows || hash.numSpotMaps !== numSpotMaps || hash.numLightProbes !== numLightProbes ) { state.directional.length = directionalLength; state.spot.length = spotLength; state.rectArea.length = rectAreaLength; state.point.length = pointLength; state.hemi.length = hemiLength; state.directionalShadow.length = numDirectionalShadows; state.directionalShadowMap.length = numDirectionalShadows; state.pointShadow.length = numPointShadows; state.pointShadowMap.length = numPointShadows; state.spotShadow.length = numSpotShadows; state.spotShadowMap.length = numSpotShadows; state.directionalShadowMatrix.length = numDirectionalShadows; state.pointShadowMatrix.length = numPointShadows; state.spotLightMatrix.length = numSpotShadows + numSpotMaps - numSpotShadowsWithMaps; state.spotLightMap.length = numSpotMaps; state.numSpotLightShadowsWithMaps = numSpotShadowsWithMaps; state.numLightProbes = numLightProbes; hash.directionalLength = directionalLength; hash.pointLength = pointLength; hash.spotLength = spotLength; hash.rectAreaLength = rectAreaLength; hash.hemiLength = hemiLength; hash.numDirectionalShadows = numDirectionalShadows; hash.numPointShadows = numPointShadows; hash.numSpotShadows = numSpotShadows; hash.numSpotMaps = numSpotMaps; hash.numLightProbes = numLightProbes; state.version = nextVersion ++; } } function setupView( lights, camera ) { let directionalLength = 0; let pointLength = 0; let spotLength = 0; let rectAreaLength = 0; let hemiLength = 0; const viewMatrix = camera.matrixWorldInverse; for ( let i = 0, l = lights.length; i < l; i ++ ) { const light = lights[ i ]; if ( light.isDirectionalLight ) { const uniforms = state.directional[ directionalLength ]; uniforms.direction.setFromMatrixPosition( light.matrixWorld ); vector3.setFromMatrixPosition( light.target.matrixWorld ); uniforms.direction.sub( vector3 ); uniforms.direction.transformDirection( viewMatrix ); directionalLength ++; } else if ( light.isSpotLight ) { const uniforms = state.spot[ spotLength ]; uniforms.position.setFromMatrixPosition( light.matrixWorld ); uniforms.position.applyMatrix4( viewMatrix ); uniforms.direction.setFromMatrixPosition( light.matrixWorld ); vector3.setFromMatrixPosition( light.target.matrixWorld ); uniforms.direction.sub( vector3 ); uniforms.direction.transformDirection( viewMatrix ); spotLength ++; } else if ( light.isRectAreaLight ) { const uniforms = state.rectArea[ rectAreaLength ]; uniforms.position.setFromMatrixPosition( light.matrixWorld ); uniforms.position.applyMatrix4( viewMatrix ); // extract local rotation of light to derive width/height half vectors matrix42.identity(); matrix4.copy( 
light.matrixWorld ); matrix4.premultiply( viewMatrix ); matrix42.extractRotation( matrix4 ); uniforms.halfWidth.set( light.width * 0.5, 0.0, 0.0 ); uniforms.halfHeight.set( 0.0, light.height * 0.5, 0.0 ); uniforms.halfWidth.applyMatrix4( matrix42 ); uniforms.halfHeight.applyMatrix4( matrix42 ); rectAreaLength ++; } else if ( light.isPointLight ) { const uniforms = state.point[ pointLength ]; uniforms.position.setFromMatrixPosition( light.matrixWorld ); uniforms.position.applyMatrix4( viewMatrix ); pointLength ++; } else if ( light.isHemisphereLight ) { const uniforms = state.hemi[ hemiLength ]; uniforms.direction.setFromMatrixPosition( light.matrixWorld ); uniforms.direction.transformDirection( viewMatrix ); hemiLength ++; } } } return { setup: setup, setupView: setupView, state: state }; } function WebGLRenderState( extensions ) { const lights = new WebGLLights( extensions ); const lightsArray = []; const shadowsArray = []; function init( camera ) { state.camera = camera; lightsArray.length = 0; shadowsArray.length = 0; } function pushLight( light ) { lightsArray.push( light ); } function pushShadow( shadowLight ) { shadowsArray.push( shadowLight ); } function setupLights() { lights.setup( lightsArray ); } function setupLightsView( camera ) { lights.setupView( lightsArray, camera ); } const state = { lightsArray: lightsArray, shadowsArray: shadowsArray, camera: null, lights: lights, transmissionRenderTarget: {} }; return { init: init, state: state, setupLights: setupLights, setupLightsView: setupLightsView, pushLight: pushLight, pushShadow: pushShadow }; } function WebGLRenderStates( extensions ) { let renderStates = new WeakMap(); function get( scene, renderCallDepth = 0 ) { const renderStateArray = renderStates.get( scene ); let renderState; if ( renderStateArray === undefined ) { renderState = new WebGLRenderState( extensions ); renderStates.set( scene, [ renderState ] ); } else { if ( renderCallDepth >= renderStateArray.length ) { renderState = new WebGLRenderState( extensions ); renderStateArray.push( renderState ); } else { renderState = renderStateArray[ renderCallDepth ]; } } return renderState; } function dispose() { renderStates = new WeakMap(); } return { get: get, dispose: dispose }; } const vertex = "void main() {\n\tgl_Position = vec4( position, 1.0 );\n}"; const fragment = "uniform sampler2D shadow_pass;\nuniform vec2 resolution;\nuniform float radius;\n#include <packing>\nvoid main() {\n\tconst float samples = float( VSM_SAMPLES );\n\tfloat mean = 0.0;\n\tfloat squared_mean = 0.0;\n\tfloat uvStride = samples <= 1.0 ? 0.0 : 2.0 / ( samples - 1.0 );\n\tfloat uvStart = samples <= 1.0 ? 
0.0 : - 1.0;\n\tfor ( float i = 0.0; i < samples; i ++ ) {\n\t\tfloat uvOffset = uvStart + i * uvStride;\n\t\t#ifdef HORIZONTAL_PASS\n\t\t\tvec2 distribution = unpackRGBATo2Half( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( uvOffset, 0.0 ) * radius ) / resolution ) );\n\t\t\tmean += distribution.x;\n\t\t\tsquared_mean += distribution.y * distribution.y + distribution.x * distribution.x;\n\t\t#else\n\t\t\tfloat depth = unpackRGBAToDepth( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( 0.0, uvOffset ) * radius ) / resolution ) );\n\t\t\tmean += depth;\n\t\t\tsquared_mean += depth * depth;\n\t\t#endif\n\t}\n\tmean = mean / samples;\n\tsquared_mean = squared_mean / samples;\n\tfloat std_dev = sqrt( squared_mean - mean * mean );\n\tgl_FragColor = pack2HalfToRGBA( vec2( mean, std_dev ) );\n}"; function WebGLShadowMap( renderer, objects, capabilities ) { let _frustum = new Frustum(); const _shadowMapSize = new Vector2(), _viewportSize = new Vector2(), _viewport = new Vector4(), _depthMaterial = new MeshDepthMaterial( { depthPacking: RGBADepthPacking } ), _distanceMaterial = new MeshDistanceMaterial(), _materialCache = {}, _maxTextureSize = capabilities.maxTextureSize; const shadowSide = { [ FrontSide ]: BackSide, [ BackSide ]: FrontSide, [ DoubleSide ]: DoubleSide }; const shadowMaterialVertical = new ShaderMaterial( { defines: { VSM_SAMPLES: 8 }, uniforms: { shadow_pass: { value: null }, resolution: { value: new Vector2() }, radius: { value: 4.0 } }, vertexShader: vertex, fragmentShader: fragment } ); const shadowMaterialHorizontal = shadowMaterialVertical.clone(); shadowMaterialHorizontal.defines.HORIZONTAL_PASS = 1; const fullScreenTri = new BufferGeometry(); fullScreenTri.setAttribute( 'position', new BufferAttribute( new Float32Array( [ -1, -1, 0.5, 3, -1, 0.5, -1, 3, 0.5 ] ), 3 ) ); const fullScreenMesh = new Mesh( fullScreenTri, shadowMaterialVertical ); const scope = this; this.enabled = false; this.autoUpdate = true; this.needsUpdate = false; this.type = PCFShadowMap; let _previousType = this.type; this.render = function ( lights, scene, camera ) { if ( scope.enabled === false ) return; if ( scope.autoUpdate === false && scope.needsUpdate === false ) return; if ( lights.length === 0 ) return; const currentRenderTarget = renderer.getRenderTarget(); const activeCubeFace = renderer.getActiveCubeFace(); const activeMipmapLevel = renderer.getActiveMipmapLevel(); const _state = renderer.state; // Set GL state for depth map. _state.setBlending( NoBlending ); _state.buffers.color.setClear( 1, 1, 1, 1 ); _state.buffers.depth.setTest( true ); _state.setScissorTest( false ); // check for shadow map type changes const toVSM = ( _previousType !== VSMShadowMap && this.type === VSMShadowMap ); const fromVSM = ( _previousType === VSMShadowMap && this.type !== VSMShadowMap ); // render depth map for ( let i = 0, il = lights.length; i < il; i ++ ) { const light = lights[ i ]; const shadow = light.shadow; if ( shadow === undefined ) { console.warn( 'THREE.WebGLShadowMap:', light, 'has no shadow.' 
); continue; } if ( shadow.autoUpdate === false && shadow.needsUpdate === false ) continue; _shadowMapSize.copy( shadow.mapSize ); const shadowFrameExtents = shadow.getFrameExtents(); _shadowMapSize.multiply( shadowFrameExtents ); _viewportSize.copy( shadow.mapSize ); if ( _shadowMapSize.x > _maxTextureSize || _shadowMapSize.y > _maxTextureSize ) { if ( _shadowMapSize.x > _maxTextureSize ) { _viewportSize.x = Math.floor( _maxTextureSize / shadowFrameExtents.x ); _shadowMapSize.x = _viewportSize.x * shadowFrameExtents.x; shadow.mapSize.x = _viewportSize.x; } if ( _shadowMapSize.y > _maxTextureSize ) { _viewportSize.y = Math.floor( _maxTextureSize / shadowFrameExtents.y ); _shadowMapSize.y = _viewportSize.y * shadowFrameExtents.y; shadow.mapSize.y = _viewportSize.y; } } if ( shadow.map === null || toVSM === true || fromVSM === true ) { const pars = ( this.type !== VSMShadowMap ) ? { minFilter: NearestFilter, magFilter: NearestFilter } : {}; if ( shadow.map !== null ) { shadow.map.dispose(); } shadow.map = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y, pars ); shadow.map.texture.name = light.name + '.shadowMap'; shadow.camera.updateProjectionMatrix(); } renderer.setRenderTarget( shadow.map ); renderer.clear(); const viewportCount = shadow.getViewportCount(); for ( let vp = 0; vp < viewportCount; vp ++ ) { const viewport = shadow.getViewport( vp ); _viewport.set( _viewportSize.x * viewport.x, _viewportSize.y * viewport.y, _viewportSize.x * viewport.z, _viewportSize.y * viewport.w ); _state.viewport( _viewport ); shadow.updateMatrices( light, vp ); _frustum = shadow.getFrustum(); renderObject( scene, camera, shadow.camera, light, this.type ); } // do blur pass for VSM if ( shadow.isPointLightShadow !== true && this.type === VSMShadowMap ) { VSMPass( shadow, camera ); } shadow.needsUpdate = false; } _previousType = this.type; scope.needsUpdate = false; renderer.setRenderTarget( currentRenderTarget, activeCubeFace, activeMipmapLevel ); }; function VSMPass( shadow, camera ) { const geometry = objects.update( fullScreenMesh ); if ( shadowMaterialVertical.defines.VSM_SAMPLES !== shadow.blurSamples ) { shadowMaterialVertical.defines.VSM_SAMPLES = shadow.blurSamples; shadowMaterialHorizontal.defines.VSM_SAMPLES = shadow.blurSamples; shadowMaterialVertical.needsUpdate = true; shadowMaterialHorizontal.needsUpdate = true; } if ( shadow.mapPass === null ) { shadow.mapPass = new WebGLRenderTarget( _shadowMapSize.x, _shadowMapSize.y ); } // vertical pass shadowMaterialVertical.uniforms.shadow_pass.value = shadow.map.texture; shadowMaterialVertical.uniforms.resolution.value = shadow.mapSize; shadowMaterialVertical.uniforms.radius.value = shadow.radius; renderer.setRenderTarget( shadow.mapPass ); renderer.clear(); renderer.renderBufferDirect( camera, null, geometry, shadowMaterialVertical, fullScreenMesh, null ); // horizontal pass shadowMaterialHorizontal.uniforms.shadow_pass.value = shadow.mapPass.texture; shadowMaterialHorizontal.uniforms.resolution.value = shadow.mapSize; shadowMaterialHorizontal.uniforms.radius.value = shadow.radius; renderer.setRenderTarget( shadow.map ); renderer.clear(); renderer.renderBufferDirect( camera, null, geometry, shadowMaterialHorizontal, fullScreenMesh, null ); } function getDepthMaterial( object, material, light, type ) { let result = null; const customMaterial = ( light.isPointLight === true ) ? 
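/*
 * Objects may override the shadow material entirely via `customDepthMaterial` (directional and
 * spot lights) or `customDistanceMaterial` (point lights). A hedged sketch for alpha-tested
 * geometry, assuming an existing `mesh` whose material carries an alpha map:
 *
 *   mesh.castShadow = true;
 *   mesh.customDepthMaterial = new MeshDepthMaterial( {
 *       depthPacking: RGBADepthPacking,
 *       alphaMap: mesh.material.alphaMap,
 *       alphaTest: 0.5
 *   } );
 */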
object.customDistanceMaterial : object.customDepthMaterial; if ( customMaterial !== undefined ) { result = customMaterial; } else { result = ( light.isPointLight === true ) ? _distanceMaterial : _depthMaterial; if ( ( renderer.localClippingEnabled && material.clipShadows === true && Array.isArray( material.clippingPlanes ) && material.clippingPlanes.length !== 0 ) || ( material.displacementMap && material.displacementScale !== 0 ) || ( material.alphaMap && material.alphaTest > 0 ) || ( material.map && material.alphaTest > 0 ) ) { // in this case we need a unique material instance reflecting the // appropriate state const keyA = result.uuid, keyB = material.uuid; let materialsForVariant = _materialCache[ keyA ]; if ( materialsForVariant === undefined ) { materialsForVariant = {}; _materialCache[ keyA ] = materialsForVariant; } let cachedMaterial = materialsForVariant[ keyB ]; if ( cachedMaterial === undefined ) { cachedMaterial = result.clone(); materialsForVariant[ keyB ] = cachedMaterial; material.addEventListener( 'dispose', onMaterialDispose ); } result = cachedMaterial; } } result.visible = material.visible; result.wireframe = material.wireframe; if ( type === VSMShadowMap ) { result.side = ( material.shadowSide !== null ) ? material.shadowSide : material.side; } else { result.side = ( material.shadowSide !== null ) ? material.shadowSide : shadowSide[ material.side ]; } result.alphaMap = material.alphaMap; result.alphaTest = material.alphaTest; result.map = material.map; result.clipShadows = material.clipShadows; result.clippingPlanes = material.clippingPlanes; result.clipIntersection = material.clipIntersection; result.displacementMap = material.displacementMap; result.displacementScale = material.displacementScale; result.displacementBias = material.displacementBias; result.wireframeLinewidth = material.wireframeLinewidth; result.linewidth = material.linewidth; if ( light.isPointLight === true && result.isMeshDistanceMaterial === true ) { const materialProperties = renderer.properties.get( result ); materialProperties.light = light; } return result; } function renderObject( object, camera, shadowCamera, light, type ) { if ( object.visible === false ) return; const visible = object.layers.test( camera.layers ); if ( visible && ( object.isMesh || object.isLine || object.isPoints ) ) { if ( ( object.castShadow || ( object.receiveShadow && type === VSMShadowMap ) ) && ( ! 
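// Render into the shadow map when the object casts shadows (or merely receives them, in the
// VSM case) and it is not culled against the shadow camera frustum (skipped if frustumCulled
// is disabled on the object).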
object.frustumCulled || _frustum.intersectsObject( object ) ) ) { object.modelViewMatrix.multiplyMatrices( shadowCamera.matrixWorldInverse, object.matrixWorld ); const geometry = objects.update( object ); const material = object.material; if ( Array.isArray( material ) ) { const groups = geometry.groups; for ( let k = 0, kl = groups.length; k < kl; k ++ ) { const group = groups[ k ]; const groupMaterial = material[ group.materialIndex ]; if ( groupMaterial && groupMaterial.visible ) { const depthMaterial = getDepthMaterial( object, groupMaterial, light, type ); object.onBeforeShadow( renderer, object, camera, shadowCamera, geometry, depthMaterial, group ); renderer.renderBufferDirect( shadowCamera, null, geometry, depthMaterial, object, group ); object.onAfterShadow( renderer, object, camera, shadowCamera, geometry, depthMaterial, group ); } } } else if ( material.visible ) { const depthMaterial = getDepthMaterial( object, material, light, type ); object.onBeforeShadow( renderer, object, camera, shadowCamera, geometry, depthMaterial, null ); renderer.renderBufferDirect( shadowCamera, null, geometry, depthMaterial, object, null ); object.onAfterShadow( renderer, object, camera, shadowCamera, geometry, depthMaterial, null ); } } } const children = object.children; for ( let i = 0, l = children.length; i < l; i ++ ) { renderObject( children[ i ], camera, shadowCamera, light, type ); } } function onMaterialDispose( event ) { const material = event.target; material.removeEventListener( 'dispose', onMaterialDispose ); // make sure to remove the unique distance/depth materials used for shadow map rendering for ( const id in _materialCache ) { const cache = _materialCache[ id ]; const uuid = event.target.uuid; if ( uuid in cache ) { const shadowMaterial = cache[ uuid ]; shadowMaterial.dispose(); delete cache[ uuid ]; } } } } const reversedFuncs = { [ NeverDepth ]: AlwaysDepth, [ LessDepth ]: GreaterDepth, [ EqualDepth ]: NotEqualDepth, [ LessEqualDepth ]: GreaterEqualDepth, [ AlwaysDepth ]: NeverDepth, [ GreaterDepth ]: LessDepth, [ NotEqualDepth ]: EqualDepth, [ GreaterEqualDepth ]: LessEqualDepth, }; function WebGLState( gl, extensions ) { function ColorBuffer() { let locked = false; const color = new Vector4(); let currentColorMask = null; const currentColorClear = new Vector4( 0, 0, 0, 0 ); return { setMask: function ( colorMask ) { if ( currentColorMask !== colorMask && ! 
locked ) { gl.colorMask( colorMask, colorMask, colorMask, colorMask ); currentColorMask = colorMask; } }, setLocked: function ( lock ) { locked = lock; }, setClear: function ( r, g, b, a, premultipliedAlpha ) { if ( premultipliedAlpha === true ) { r *= a; g *= a; b *= a; } color.set( r, g, b, a ); if ( currentColorClear.equals( color ) === false ) { gl.clearColor( r, g, b, a ); currentColorClear.copy( color ); } }, reset: function () { locked = false; currentColorMask = null; currentColorClear.set( -1, 0, 0, 0 ); // set to invalid state } }; } function DepthBuffer() { let locked = false; let currentReversed = false; let currentDepthMask = null; let currentDepthFunc = null; let currentDepthClear = null; return { setReversed: function ( reversed ) { if ( currentReversed !== reversed ) { const ext = extensions.get( 'EXT_clip_control' ); if ( reversed ) { ext.clipControlEXT( ext.LOWER_LEFT_EXT, ext.ZERO_TO_ONE_EXT ); } else { ext.clipControlEXT( ext.LOWER_LEFT_EXT, ext.NEGATIVE_ONE_TO_ONE_EXT ); } currentReversed = reversed; const oldDepth = currentDepthClear; currentDepthClear = null; this.setClear( oldDepth ); } }, getReversed: function () { return currentReversed; }, setTest: function ( depthTest ) { if ( depthTest ) { enable( gl.DEPTH_TEST ); } else { disable( gl.DEPTH_TEST ); } }, setMask: function ( depthMask ) { if ( currentDepthMask !== depthMask && ! locked ) { gl.depthMask( depthMask ); currentDepthMask = depthMask; } }, setFunc: function ( depthFunc ) { if ( currentReversed ) depthFunc = reversedFuncs[ depthFunc ]; if ( currentDepthFunc !== depthFunc ) { switch ( depthFunc ) { case NeverDepth: gl.depthFunc( gl.NEVER ); break; case AlwaysDepth: gl.depthFunc( gl.ALWAYS ); break; case LessDepth: gl.depthFunc( gl.LESS ); break; case LessEqualDepth: gl.depthFunc( gl.LEQUAL ); break; case EqualDepth: gl.depthFunc( gl.EQUAL ); break; case GreaterEqualDepth: gl.depthFunc( gl.GEQUAL ); break; case GreaterDepth: gl.depthFunc( gl.GREATER ); break; case NotEqualDepth: gl.depthFunc( gl.NOTEQUAL ); break; default: gl.depthFunc( gl.LEQUAL ); } currentDepthFunc = depthFunc; } }, setLocked: function ( lock ) { locked = lock; }, setClear: function ( depth ) { if ( currentDepthClear !== depth ) { if ( currentReversed ) { depth = 1 - depth; } gl.clearDepth( depth ); currentDepthClear = depth; } }, reset: function () { locked = false; currentDepthMask = null; currentDepthFunc = null; currentDepthClear = null; currentReversed = false; } }; } function StencilBuffer() { let locked = false; let currentStencilMask = null; let currentStencilFunc = null; let currentStencilRef = null; let currentStencilFuncMask = null; let currentStencilFail = null; let currentStencilZFail = null; let currentStencilZPass = null; let currentStencilClear = null; return { setTest: function ( stencilTest ) { if ( ! locked ) { if ( stencilTest ) { enable( gl.STENCIL_TEST ); } else { disable( gl.STENCIL_TEST ); } } }, setMask: function ( stencilMask ) { if ( currentStencilMask !== stencilMask && ! 
locked ) { gl.stencilMask( stencilMask ); currentStencilMask = stencilMask; } }, setFunc: function ( stencilFunc, stencilRef, stencilMask ) { if ( currentStencilFunc !== stencilFunc || currentStencilRef !== stencilRef || currentStencilFuncMask !== stencilMask ) { gl.stencilFunc( stencilFunc, stencilRef, stencilMask ); currentStencilFunc = stencilFunc; currentStencilRef = stencilRef; currentStencilFuncMask = stencilMask; } }, setOp: function ( stencilFail, stencilZFail, stencilZPass ) { if ( currentStencilFail !== stencilFail || currentStencilZFail !== stencilZFail || currentStencilZPass !== stencilZPass ) { gl.stencilOp( stencilFail, stencilZFail, stencilZPass ); currentStencilFail = stencilFail; currentStencilZFail = stencilZFail; currentStencilZPass = stencilZPass; } }, setLocked: function ( lock ) { locked = lock; }, setClear: function ( stencil ) { if ( currentStencilClear !== stencil ) { gl.clearStencil( stencil ); currentStencilClear = stencil; } }, reset: function () { locked = false; currentStencilMask = null; currentStencilFunc = null; currentStencilRef = null; currentStencilFuncMask = null; currentStencilFail = null; currentStencilZFail = null; currentStencilZPass = null; currentStencilClear = null; } }; } // const colorBuffer = new ColorBuffer(); const depthBuffer = new DepthBuffer(); const stencilBuffer = new StencilBuffer(); const uboBindings = new WeakMap(); const uboProgramMap = new WeakMap(); let enabledCapabilities = {}; let currentBoundFramebuffers = {}; let currentDrawbuffers = new WeakMap(); let defaultDrawbuffers = []; let currentProgram = null; let currentBlendingEnabled = false; let currentBlending = null; let currentBlendEquation = null; let currentBlendSrc = null; let currentBlendDst = null; let currentBlendEquationAlpha = null; let currentBlendSrcAlpha = null; let currentBlendDstAlpha = null; let currentBlendColor = new Color( 0, 0, 0 ); let currentBlendAlpha = 0; let currentPremultipledAlpha = false; let currentFlipSided = null; let currentCullFace = null; let currentLineWidth = null; let currentPolygonOffsetFactor = null; let currentPolygonOffsetUnits = null; const maxTextures = gl.getParameter( gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS ); let lineWidthAvailable = false; let version = 0; const glVersion = gl.getParameter( gl.VERSION ); if ( glVersion.indexOf( 'WebGL' ) !== -1 ) { version = parseFloat( /^WebGL (\d)/.exec( glVersion )[ 1 ] ); lineWidthAvailable = ( version >= 1.0 ); } else if ( glVersion.indexOf( 'OpenGL ES' ) !== -1 ) { version = parseFloat( /^OpenGL ES (\d)/.exec( glVersion )[ 1 ] ); lineWidthAvailable = ( version >= 2.0 ); } let currentTextureSlot = null; let currentBoundTextures = {}; const scissorParam = gl.getParameter( gl.SCISSOR_BOX ); const viewportParam = gl.getParameter( gl.VIEWPORT ); const currentScissor = new Vector4().fromArray( scissorParam ); const currentViewport = new Vector4().fromArray( viewportParam ); function createTexture( type, target, count, dimensions ) { const data = new Uint8Array( 4 ); // 4 is required to match default unpack alignment of 4. 
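/*
 * createTexture() builds 1x1 (or 1x1x1) placeholder textures. They are bound to texture units
 * that have no real texture assigned, so every sampler uniform always sees a complete texture
 * object (see `emptyTextures` below and the fallback in bindTexture()).
 */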
const texture = gl.createTexture(); gl.bindTexture( type, texture ); gl.texParameteri( type, gl.TEXTURE_MIN_FILTER, gl.NEAREST ); gl.texParameteri( type, gl.TEXTURE_MAG_FILTER, gl.NEAREST ); for ( let i = 0; i < count; i ++ ) { if ( type === gl.TEXTURE_3D || type === gl.TEXTURE_2D_ARRAY ) { gl.texImage3D( target, 0, gl.RGBA, 1, 1, dimensions, 0, gl.RGBA, gl.UNSIGNED_BYTE, data ); } else { gl.texImage2D( target + i, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data ); } } return texture; } const emptyTextures = {}; emptyTextures[ gl.TEXTURE_2D ] = createTexture( gl.TEXTURE_2D, gl.TEXTURE_2D, 1 ); emptyTextures[ gl.TEXTURE_CUBE_MAP ] = createTexture( gl.TEXTURE_CUBE_MAP, gl.TEXTURE_CUBE_MAP_POSITIVE_X, 6 ); emptyTextures[ gl.TEXTURE_2D_ARRAY ] = createTexture( gl.TEXTURE_2D_ARRAY, gl.TEXTURE_2D_ARRAY, 1, 1 ); emptyTextures[ gl.TEXTURE_3D ] = createTexture( gl.TEXTURE_3D, gl.TEXTURE_3D, 1, 1 ); // init colorBuffer.setClear( 0, 0, 0, 1 ); depthBuffer.setClear( 1 ); stencilBuffer.setClear( 0 ); enable( gl.DEPTH_TEST ); depthBuffer.setFunc( LessEqualDepth ); setFlipSided( false ); setCullFace( CullFaceBack ); enable( gl.CULL_FACE ); setBlending( NoBlending ); // function enable( id ) { if ( enabledCapabilities[ id ] !== true ) { gl.enable( id ); enabledCapabilities[ id ] = true; } } function disable( id ) { if ( enabledCapabilities[ id ] !== false ) { gl.disable( id ); enabledCapabilities[ id ] = false; } } function bindFramebuffer( target, framebuffer ) { if ( currentBoundFramebuffers[ target ] !== framebuffer ) { gl.bindFramebuffer( target, framebuffer ); currentBoundFramebuffers[ target ] = framebuffer; // gl.DRAW_FRAMEBUFFER is equivalent to gl.FRAMEBUFFER if ( target === gl.DRAW_FRAMEBUFFER ) { currentBoundFramebuffers[ gl.FRAMEBUFFER ] = framebuffer; } if ( target === gl.FRAMEBUFFER ) { currentBoundFramebuffers[ gl.DRAW_FRAMEBUFFER ] = framebuffer; } return true; } return false; } function drawBuffers( renderTarget, framebuffer ) { let drawBuffers = defaultDrawbuffers; let needsUpdate = false; if ( renderTarget ) { drawBuffers = currentDrawbuffers.get( framebuffer ); if ( drawBuffers === undefined ) { drawBuffers = []; currentDrawbuffers.set( framebuffer, drawBuffers ); } const textures = renderTarget.textures; if ( drawBuffers.length !== textures.length || drawBuffers[ 0 ] !== gl.COLOR_ATTACHMENT0 ) { for ( let i = 0, il = textures.length; i < il; i ++ ) { drawBuffers[ i ] = gl.COLOR_ATTACHMENT0 + i; } drawBuffers.length = textures.length; needsUpdate = true; } } else { if ( drawBuffers[ 0 ] !== gl.BACK ) { drawBuffers[ 0 ] = gl.BACK; needsUpdate = true; } } if ( needsUpdate ) { gl.drawBuffers( drawBuffers ); } } function useProgram( program ) { if ( currentProgram !== program ) { gl.useProgram( program ); currentProgram = program; return true; } return false; } const equationToGL = { [ AddEquation ]: gl.FUNC_ADD, [ SubtractEquation ]: gl.FUNC_SUBTRACT, [ ReverseSubtractEquation ]: gl.FUNC_REVERSE_SUBTRACT }; equationToGL[ MinEquation ] = gl.MIN; equationToGL[ MaxEquation ] = gl.MAX; const factorToGL = { [ ZeroFactor ]: gl.ZERO, [ OneFactor ]: gl.ONE, [ SrcColorFactor ]: gl.SRC_COLOR, [ SrcAlphaFactor ]: gl.SRC_ALPHA, [ SrcAlphaSaturateFactor ]: gl.SRC_ALPHA_SATURATE, [ DstColorFactor ]: gl.DST_COLOR, [ DstAlphaFactor ]: gl.DST_ALPHA, [ OneMinusSrcColorFactor ]: gl.ONE_MINUS_SRC_COLOR, [ OneMinusSrcAlphaFactor ]: gl.ONE_MINUS_SRC_ALPHA, [ OneMinusDstColorFactor ]: gl.ONE_MINUS_DST_COLOR, [ OneMinusDstAlphaFactor ]: gl.ONE_MINUS_DST_ALPHA, [ ConstantColorFactor ]: gl.CONSTANT_COLOR, [ 
OneMinusConstantColorFactor ]: gl.ONE_MINUS_CONSTANT_COLOR, [ ConstantAlphaFactor ]: gl.CONSTANT_ALPHA, [ OneMinusConstantAlphaFactor ]: gl.ONE_MINUS_CONSTANT_ALPHA }; function setBlending( blending, blendEquation, blendSrc, blendDst, blendEquationAlpha, blendSrcAlpha, blendDstAlpha, blendColor, blendAlpha, premultipliedAlpha ) { if ( blending === NoBlending ) { if ( currentBlendingEnabled === true ) { disable( gl.BLEND ); currentBlendingEnabled = false; } return; } if ( currentBlendingEnabled === false ) { enable( gl.BLEND ); currentBlendingEnabled = true; } if ( blending !== CustomBlending ) { if ( blending !== currentBlending || premultipliedAlpha !== currentPremultipledAlpha ) { if ( currentBlendEquation !== AddEquation || currentBlendEquationAlpha !== AddEquation ) { gl.blendEquation( gl.FUNC_ADD ); currentBlendEquation = AddEquation; currentBlendEquationAlpha = AddEquation; } if ( premultipliedAlpha ) { switch ( blending ) { case NormalBlending: gl.blendFuncSeparate( gl.ONE, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA ); break; case AdditiveBlending: gl.blendFunc( gl.ONE, gl.ONE ); break; case SubtractiveBlending: gl.blendFuncSeparate( gl.ZERO, gl.ONE_MINUS_SRC_COLOR, gl.ZERO, gl.ONE ); break; case MultiplyBlending: gl.blendFuncSeparate( gl.ZERO, gl.SRC_COLOR, gl.ZERO, gl.SRC_ALPHA ); break; default: console.error( 'THREE.WebGLState: Invalid blending: ', blending ); break; } } else { switch ( blending ) { case NormalBlending: gl.blendFuncSeparate( gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA ); break; case AdditiveBlending: gl.blendFunc( gl.SRC_ALPHA, gl.ONE ); break; case SubtractiveBlending: gl.blendFuncSeparate( gl.ZERO, gl.ONE_MINUS_SRC_COLOR, gl.ZERO, gl.ONE ); break; case MultiplyBlending: gl.blendFunc( gl.ZERO, gl.SRC_COLOR ); break; default: console.error( 'THREE.WebGLState: Invalid blending: ', blending ); break; } } currentBlendSrc = null; currentBlendDst = null; currentBlendSrcAlpha = null; currentBlendDstAlpha = null; currentBlendColor.set( 0, 0, 0 ); currentBlendAlpha = 0; currentBlending = blending; currentPremultipledAlpha = premultipliedAlpha; } return; } // custom blending blendEquationAlpha = blendEquationAlpha || blendEquation; blendSrcAlpha = blendSrcAlpha || blendSrc; blendDstAlpha = blendDstAlpha || blendDst; if ( blendEquation !== currentBlendEquation || blendEquationAlpha !== currentBlendEquationAlpha ) { gl.blendEquationSeparate( equationToGL[ blendEquation ], equationToGL[ blendEquationAlpha ] ); currentBlendEquation = blendEquation; currentBlendEquationAlpha = blendEquationAlpha; } if ( blendSrc !== currentBlendSrc || blendDst !== currentBlendDst || blendSrcAlpha !== currentBlendSrcAlpha || blendDstAlpha !== currentBlendDstAlpha ) { gl.blendFuncSeparate( factorToGL[ blendSrc ], factorToGL[ blendDst ], factorToGL[ blendSrcAlpha ], factorToGL[ blendDstAlpha ] ); currentBlendSrc = blendSrc; currentBlendDst = blendDst; currentBlendSrcAlpha = blendSrcAlpha; currentBlendDstAlpha = blendDstAlpha; } if ( blendColor.equals( currentBlendColor ) === false || blendAlpha !== currentBlendAlpha ) { gl.blendColor( blendColor.r, blendColor.g, blendColor.b, blendAlpha ); currentBlendColor.copy( blendColor ); currentBlendAlpha = blendAlpha; } currentBlending = blending; currentPremultipledAlpha = false; } function setMaterial( material, frontFaceCW ) { material.side === DoubleSide ? disable( gl.CULL_FACE ) : enable( gl.CULL_FACE ); let flipSided = ( material.side === BackSide ); if ( frontFaceCW ) flipSided = ! 
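/*
 * setBlending() above maps material blend settings onto gl.blendEquationSeparate /
 * gl.blendFuncSeparate. A hedged example of driving the custom path from user code (the
 * `material` instance is an assumption):
 *
 *   material.blending = CustomBlending;
 *   material.blendEquation = AddEquation;
 *   material.blendSrc = SrcAlphaFactor;
 *   material.blendDst = OneMinusSrcColorFactor;
 *   material.blendColor.set( 0, 0, 0 );  // only relevant for the Constant*Factor modes
 *   material.blendAlpha = 0;
 */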
flipSided; setFlipSided( flipSided ); ( material.blending === NormalBlending && material.transparent === false ) ? setBlending( NoBlending ) : setBlending( material.blending, material.blendEquation, material.blendSrc, material.blendDst, material.blendEquationAlpha, material.blendSrcAlpha, material.blendDstAlpha, material.blendColor, material.blendAlpha, material.premultipliedAlpha ); depthBuffer.setFunc( material.depthFunc ); depthBuffer.setTest( material.depthTest ); depthBuffer.setMask( material.depthWrite ); colorBuffer.setMask( material.colorWrite ); const stencilWrite = material.stencilWrite; stencilBuffer.setTest( stencilWrite ); if ( stencilWrite ) { stencilBuffer.setMask( material.stencilWriteMask ); stencilBuffer.setFunc( material.stencilFunc, material.stencilRef, material.stencilFuncMask ); stencilBuffer.setOp( material.stencilFail, material.stencilZFail, material.stencilZPass ); } setPolygonOffset( material.polygonOffset, material.polygonOffsetFactor, material.polygonOffsetUnits ); material.alphaToCoverage === true ? enable( gl.SAMPLE_ALPHA_TO_COVERAGE ) : disable( gl.SAMPLE_ALPHA_TO_COVERAGE ); } // function setFlipSided( flipSided ) { if ( currentFlipSided !== flipSided ) { if ( flipSided ) { gl.frontFace( gl.CW ); } else { gl.frontFace( gl.CCW ); } currentFlipSided = flipSided; } } function setCullFace( cullFace ) { if ( cullFace !== CullFaceNone ) { enable( gl.CULL_FACE ); if ( cullFace !== currentCullFace ) { if ( cullFace === CullFaceBack ) { gl.cullFace( gl.BACK ); } else if ( cullFace === CullFaceFront ) { gl.cullFace( gl.FRONT ); } else { gl.cullFace( gl.FRONT_AND_BACK ); } } } else { disable( gl.CULL_FACE ); } currentCullFace = cullFace; } function setLineWidth( width ) { if ( width !== currentLineWidth ) { if ( lineWidthAvailable ) gl.lineWidth( width ); currentLineWidth = width; } } function setPolygonOffset( polygonOffset, factor, units ) { if ( polygonOffset ) { enable( gl.POLYGON_OFFSET_FILL ); if ( currentPolygonOffsetFactor !== factor || currentPolygonOffsetUnits !== units ) { gl.polygonOffset( factor, units ); currentPolygonOffsetFactor = factor; currentPolygonOffsetUnits = units; } } else { disable( gl.POLYGON_OFFSET_FILL ); } } function setScissorTest( scissorTest ) { if ( scissorTest ) { enable( gl.SCISSOR_TEST ); } else { disable( gl.SCISSOR_TEST ); } } // texture function activeTexture( webglSlot ) { if ( webglSlot === undefined ) webglSlot = gl.TEXTURE0 + maxTextures - 1; if ( currentTextureSlot !== webglSlot ) { gl.activeTexture( webglSlot ); currentTextureSlot = webglSlot; } } function bindTexture( webglType, webglTexture, webglSlot ) { if ( webglSlot === undefined ) { if ( currentTextureSlot === null ) { webglSlot = gl.TEXTURE0 + maxTextures - 1; } else { webglSlot = currentTextureSlot; } } let boundTexture = currentBoundTextures[ webglSlot ]; if ( boundTexture === undefined ) { boundTexture = { type: undefined, texture: undefined }; currentBoundTextures[ webglSlot ] = boundTexture; } if ( boundTexture.type !== webglType || boundTexture.texture !== webglTexture ) { if ( currentTextureSlot !== webglSlot ) { gl.activeTexture( webglSlot ); currentTextureSlot = webglSlot; } gl.bindTexture( webglType, webglTexture || emptyTextures[ webglType ] ); boundTexture.type = webglType; boundTexture.texture = webglTexture; } } function unbindTexture() { const boundTexture = currentBoundTextures[ currentTextureSlot ]; if ( boundTexture !== undefined && boundTexture.type !== undefined ) { gl.bindTexture( boundTexture.type, null ); boundTexture.type = undefined; 
boundTexture.texture = undefined; } } function compressedTexImage2D() { try { gl.compressedTexImage2D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function compressedTexImage3D() { try { gl.compressedTexImage3D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texSubImage2D() { try { gl.texSubImage2D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texSubImage3D() { try { gl.texSubImage3D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function compressedTexSubImage2D() { try { gl.compressedTexSubImage2D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function compressedTexSubImage3D() { try { gl.compressedTexSubImage3D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texStorage2D() { try { gl.texStorage2D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texStorage3D() { try { gl.texStorage3D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texImage2D() { try { gl.texImage2D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } function texImage3D() { try { gl.texImage3D( ...arguments ); } catch ( error ) { console.error( 'THREE.WebGLState:', error ); } } // function scissor( scissor ) { if ( currentScissor.equals( scissor ) === false ) { gl.scissor( scissor.x, scissor.y, scissor.z, scissor.w ); currentScissor.copy( scissor ); } } function viewport( viewport ) { if ( currentViewport.equals( viewport ) === false ) { gl.viewport( viewport.x, viewport.y, viewport.z, viewport.w ); currentViewport.copy( viewport ); } } function updateUBOMapping( uniformsGroup, program ) { let mapping = uboProgramMap.get( program ); if ( mapping === undefined ) { mapping = new WeakMap(); uboProgramMap.set( program, mapping ); } let blockIndex = mapping.get( uniformsGroup ); if ( blockIndex === undefined ) { blockIndex = gl.getUniformBlockIndex( program, uniformsGroup.name ); mapping.set( uniformsGroup, blockIndex ); } } function uniformBlockBinding( uniformsGroup, program ) { const mapping = uboProgramMap.get( program ); const blockIndex = mapping.get( uniformsGroup ); if ( uboBindings.get( program ) !== blockIndex ) { // bind shader specific block index to global block point gl.uniformBlockBinding( program, blockIndex, uniformsGroup.__bindingPointIndex ); uboBindings.set( program, blockIndex ); } } // function reset() { // reset state gl.disable( gl.BLEND ); gl.disable( gl.CULL_FACE ); gl.disable( gl.DEPTH_TEST ); gl.disable( gl.POLYGON_OFFSET_FILL ); gl.disable( gl.SCISSOR_TEST ); gl.disable( gl.STENCIL_TEST ); gl.disable( gl.SAMPLE_ALPHA_TO_COVERAGE ); gl.blendEquation( gl.FUNC_ADD ); gl.blendFunc( gl.ONE, gl.ZERO ); gl.blendFuncSeparate( gl.ONE, gl.ZERO, gl.ONE, gl.ZERO ); gl.blendColor( 0, 0, 0, 0 ); gl.colorMask( true, true, true, true ); gl.clearColor( 0, 0, 0, 0 ); gl.depthMask( true ); gl.depthFunc( gl.LESS ); depthBuffer.setReversed( false ); gl.clearDepth( 1 ); gl.stencilMask( 0xffffffff ); gl.stencilFunc( gl.ALWAYS, 0, 0xffffffff ); gl.stencilOp( gl.KEEP, gl.KEEP, gl.KEEP ); gl.clearStencil( 0 ); gl.cullFace( gl.BACK ); gl.frontFace( gl.CCW ); gl.polygonOffset( 0, 0 ); gl.activeTexture( gl.TEXTURE0 ); gl.bindFramebuffer( gl.FRAMEBUFFER, null ); gl.bindFramebuffer( gl.DRAW_FRAMEBUFFER, null ); gl.bindFramebuffer( 
gl.READ_FRAMEBUFFER, null ); gl.useProgram( null ); gl.lineWidth( 1 ); gl.scissor( 0, 0, gl.canvas.width, gl.canvas.height ); gl.viewport( 0, 0, gl.canvas.width, gl.canvas.height ); // reset internals enabledCapabilities = {}; currentTextureSlot = null; currentBoundTextures = {}; currentBoundFramebuffers = {}; currentDrawbuffers = new WeakMap(); defaultDrawbuffers = []; currentProgram = null; currentBlendingEnabled = false; currentBlending = null; currentBlendEquation = null; currentBlendSrc = null; currentBlendDst = null; currentBlendEquationAlpha = null; currentBlendSrcAlpha = null; currentBlendDstAlpha = null; currentBlendColor = new Color( 0, 0, 0 ); currentBlendAlpha = 0; currentPremultipledAlpha = false; currentFlipSided = null; currentCullFace = null; currentLineWidth = null; currentPolygonOffsetFactor = null; currentPolygonOffsetUnits = null; currentScissor.set( 0, 0, gl.canvas.width, gl.canvas.height ); currentViewport.set( 0, 0, gl.canvas.width, gl.canvas.height ); colorBuffer.reset(); depthBuffer.reset(); stencilBuffer.reset(); } return { buffers: { color: colorBuffer, depth: depthBuffer, stencil: stencilBuffer }, enable: enable, disable: disable, bindFramebuffer: bindFramebuffer, drawBuffers: drawBuffers, useProgram: useProgram, setBlending: setBlending, setMaterial: setMaterial, setFlipSided: setFlipSided, setCullFace: setCullFace, setLineWidth: setLineWidth, setPolygonOffset: setPolygonOffset, setScissorTest: setScissorTest, activeTexture: activeTexture, bindTexture: bindTexture, unbindTexture: unbindTexture, compressedTexImage2D: compressedTexImage2D, compressedTexImage3D: compressedTexImage3D, texImage2D: texImage2D, texImage3D: texImage3D, updateUBOMapping: updateUBOMapping, uniformBlockBinding: uniformBlockBinding, texStorage2D: texStorage2D, texStorage3D: texStorage3D, texSubImage2D: texSubImage2D, texSubImage3D: texSubImage3D, compressedTexSubImage2D: compressedTexSubImage2D, compressedTexSubImage3D: compressedTexSubImage3D, scissor: scissor, viewport: viewport, reset: reset }; } function WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info ) { const multisampledRTTExt = extensions.has( 'WEBGL_multisampled_render_to_texture' ) ? extensions.get( 'WEBGL_multisampled_render_to_texture' ) : null; const supportsInvalidateFramebuffer = typeof navigator === 'undefined' ? false : /OculusBrowser/g.test( navigator.userAgent ); const _imageDimensions = new Vector2(); const _videoTextures = new WeakMap(); let _canvas; const _sources = new WeakMap(); // maps Source instances to their cached WebGLTexture entries // cordova iOS (as of 5.0) still uses UIWebView, which provides OffscreenCanvas, // also OffscreenCanvas.getContext("webgl"), but not OffscreenCanvas.getContext("2d")! // Some implementations may only implement OffscreenCanvas partially (e.g. lacking 2d). let useOffscreenCanvas = false; try { useOffscreenCanvas = typeof OffscreenCanvas !== 'undefined' // eslint-disable-next-line compat/compat && ( new OffscreenCanvas( 1, 1 ).getContext( '2d' ) ) !== null; } catch ( err ) { // Ignore any errors } function createCanvas( width, height ) { // Use OffscreenCanvas when available. Especially needed in web workers return useOffscreenCanvas ?
// eslint-disable-next-line compat/compat new OffscreenCanvas( width, height ) : createElementNS( 'canvas' ); } function resizeImage( image, needsNewCanvas, maxSize ) { let scale = 1; const dimensions = getDimensions( image ); // handle case if texture exceeds max size if ( dimensions.width > maxSize || dimensions.height > maxSize ) { scale = maxSize / Math.max( dimensions.width, dimensions.height ); } // only perform resize if necessary if ( scale < 1 ) { // only perform resize for certain image types if ( ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) || ( typeof HTMLCanvasElement !== 'undefined' && image instanceof HTMLCanvasElement ) || ( typeof ImageBitmap !== 'undefined' && image instanceof ImageBitmap ) || ( typeof VideoFrame !== 'undefined' && image instanceof VideoFrame ) ) { const width = Math.floor( scale * dimensions.width ); const height = Math.floor( scale * dimensions.height ); if ( _canvas === undefined ) _canvas = createCanvas( width, height ); // cube textures can't reuse the same canvas const canvas = needsNewCanvas ? createCanvas( width, height ) : _canvas; canvas.width = width; canvas.height = height; const context = canvas.getContext( '2d' ); context.drawImage( image, 0, 0, width, height ); console.warn( 'THREE.WebGLRenderer: Texture has been resized from (' + dimensions.width + 'x' + dimensions.height + ') to (' + width + 'x' + height + ').' ); return canvas; } else { if ( 'data' in image ) { console.warn( 'THREE.WebGLRenderer: Image in DataTexture is too big (' + dimensions.width + 'x' + dimensions.height + ').' ); } return image; } } return image; } function textureNeedsGenerateMipmaps( texture ) { return texture.generateMipmaps; } function generateMipmap( target ) { _gl.generateMipmap( target ); } function getTargetType( texture ) { if ( texture.isWebGLCubeRenderTarget ) return _gl.TEXTURE_CUBE_MAP; if ( texture.isWebGL3DRenderTarget ) return _gl.TEXTURE_3D; if ( texture.isWebGLArrayRenderTarget || texture.isCompressedArrayTexture ) return _gl.TEXTURE_2D_ARRAY; return _gl.TEXTURE_2D; } function getInternalFormat( internalFormatName, glFormat, glType, colorSpace, forceLinearTransfer = false ) { if ( internalFormatName !== null ) { if ( _gl[ internalFormatName ] !== undefined ) return _gl[ internalFormatName ]; console.warn( 'THREE.WebGLRenderer: Attempt to use non-existing WebGL internal format \'' + internalFormatName + '\'' ); } let internalFormat = glFormat; if ( glFormat === _gl.RED ) { if ( glType === _gl.FLOAT ) internalFormat = _gl.R32F; if ( glType === _gl.HALF_FLOAT ) internalFormat = _gl.R16F; if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.R8; } if ( glFormat === _gl.RED_INTEGER ) { if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.R8UI; if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.R16UI; if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.R32UI; if ( glType === _gl.BYTE ) internalFormat = _gl.R8I; if ( glType === _gl.SHORT ) internalFormat = _gl.R16I; if ( glType === _gl.INT ) internalFormat = _gl.R32I; } if ( glFormat === _gl.RG ) { if ( glType === _gl.FLOAT ) internalFormat = _gl.RG32F; if ( glType === _gl.HALF_FLOAT ) internalFormat = _gl.RG16F; if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RG8; } if ( glFormat === _gl.RG_INTEGER ) { if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RG8UI; if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.RG16UI; if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.RG32UI; if ( glType === _gl.BYTE ) internalFormat = 
_gl.RG8I; if ( glType === _gl.SHORT ) internalFormat = _gl.RG16I; if ( glType === _gl.INT ) internalFormat = _gl.RG32I; } if ( glFormat === _gl.RGB_INTEGER ) { if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RGB8UI; if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.RGB16UI; if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.RGB32UI; if ( glType === _gl.BYTE ) internalFormat = _gl.RGB8I; if ( glType === _gl.SHORT ) internalFormat = _gl.RGB16I; if ( glType === _gl.INT ) internalFormat = _gl.RGB32I; } if ( glFormat === _gl.RGBA_INTEGER ) { if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RGBA8UI; if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.RGBA16UI; if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.RGBA32UI; if ( glType === _gl.BYTE ) internalFormat = _gl.RGBA8I; if ( glType === _gl.SHORT ) internalFormat = _gl.RGBA16I; if ( glType === _gl.INT ) internalFormat = _gl.RGBA32I; } if ( glFormat === _gl.RGB ) { if ( glType === _gl.UNSIGNED_INT_5_9_9_9_REV ) internalFormat = _gl.RGB9_E5; } if ( glFormat === _gl.RGBA ) { const transfer = forceLinearTransfer ? LinearTransfer : ColorManagement.getTransfer( colorSpace ); if ( glType === _gl.FLOAT ) internalFormat = _gl.RGBA32F; if ( glType === _gl.HALF_FLOAT ) internalFormat = _gl.RGBA16F; if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = ( transfer === SRGBTransfer ) ? _gl.SRGB8_ALPHA8 : _gl.RGBA8; if ( glType === _gl.UNSIGNED_SHORT_4_4_4_4 ) internalFormat = _gl.RGBA4; if ( glType === _gl.UNSIGNED_SHORT_5_5_5_1 ) internalFormat = _gl.RGB5_A1; } if ( internalFormat === _gl.R16F || internalFormat === _gl.R32F || internalFormat === _gl.RG16F || internalFormat === _gl.RG32F || internalFormat === _gl.RGBA16F || internalFormat === _gl.RGBA32F ) { extensions.get( 'EXT_color_buffer_float' ); } return internalFormat; } function getInternalDepthFormat( useStencil, depthType ) { let glInternalFormat; if ( useStencil ) { if ( depthType === null || depthType === UnsignedIntType || depthType === UnsignedInt248Type ) { glInternalFormat = _gl.DEPTH24_STENCIL8; } else if ( depthType === FloatType ) { glInternalFormat = _gl.DEPTH32F_STENCIL8; } else if ( depthType === UnsignedShortType ) { glInternalFormat = _gl.DEPTH24_STENCIL8; console.warn( 'DepthTexture: 16 bit depth attachment is not supported with stencil. Using 24-bit attachment.' 
); } } else { if ( depthType === null || depthType === UnsignedIntType || depthType === UnsignedInt248Type ) { glInternalFormat = _gl.DEPTH_COMPONENT24; } else if ( depthType === FloatType ) { glInternalFormat = _gl.DEPTH_COMPONENT32F; } else if ( depthType === UnsignedShortType ) { glInternalFormat = _gl.DEPTH_COMPONENT16; } } return glInternalFormat; } function getMipLevels( texture, image ) { if ( textureNeedsGenerateMipmaps( texture ) === true || ( texture.isFramebufferTexture && texture.minFilter !== NearestFilter && texture.minFilter !== LinearFilter ) ) { return Math.log2( Math.max( image.width, image.height ) ) + 1; } else if ( texture.mipmaps !== undefined && texture.mipmaps.length > 0 ) { // user-defined mipmaps return texture.mipmaps.length; } else if ( texture.isCompressedTexture && Array.isArray( texture.image ) ) { return image.mipmaps.length; } else { // texture without mipmaps (only base level) return 1; } } // function onTextureDispose( event ) { const texture = event.target; texture.removeEventListener( 'dispose', onTextureDispose ); deallocateTexture( texture ); if ( texture.isVideoTexture ) { _videoTextures.delete( texture ); } } function onRenderTargetDispose( event ) { const renderTarget = event.target; renderTarget.removeEventListener( 'dispose', onRenderTargetDispose ); deallocateRenderTarget( renderTarget ); } // function deallocateTexture( texture ) { const textureProperties = properties.get( texture ); if ( textureProperties.__webglInit === undefined ) return; // check if it's necessary to remove the WebGLTexture object const source = texture.source; const webglTextures = _sources.get( source ); if ( webglTextures ) { const webglTexture = webglTextures[ textureProperties.__cacheKey ]; webglTexture.usedTimes --; // the WebGLTexture object is not used anymore, remove it if ( webglTexture.usedTimes === 0 ) { deleteTexture( texture ); } // remove the weak map entry if no WebGLTexture uses the source anymore if ( Object.keys( webglTextures ).length === 0 ) { _sources.delete( source ); } } properties.remove( texture ); } function deleteTexture( texture ) { const textureProperties = properties.get( texture ); _gl.deleteTexture( textureProperties.__webglTexture ); const source = texture.source; const webglTextures = _sources.get( source ); delete webglTextures[ textureProperties.__cacheKey ]; info.memory.textures --; } function deallocateRenderTarget( renderTarget ) { const renderTargetProperties = properties.get( renderTarget ); if ( renderTarget.depthTexture ) { renderTarget.depthTexture.dispose(); properties.remove( renderTarget.depthTexture ); } if ( renderTarget.isWebGLCubeRenderTarget ) { for ( let i = 0; i < 6; i ++ ) { if ( Array.isArray( renderTargetProperties.__webglFramebuffer[ i ] ) ) { for ( let level = 0; level < renderTargetProperties.__webglFramebuffer[ i ].length; level ++ ) _gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer[ i ][ level ] ); } else { _gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer[ i ] ); } if ( renderTargetProperties.__webglDepthbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglDepthbuffer[ i ] ); } } else { if ( Array.isArray( renderTargetProperties.__webglFramebuffer ) ) { for ( let level = 0; level < renderTargetProperties.__webglFramebuffer.length; level ++ ) _gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer[ level ] ); } else { _gl.deleteFramebuffer( renderTargetProperties.__webglFramebuffer ); } if ( renderTargetProperties.__webglDepthbuffer ) _gl.deleteRenderbuffer( 
renderTargetProperties.__webglDepthbuffer ); if ( renderTargetProperties.__webglMultisampledFramebuffer ) _gl.deleteFramebuffer( renderTargetProperties.__webglMultisampledFramebuffer ); if ( renderTargetProperties.__webglColorRenderbuffer ) { for ( let i = 0; i < renderTargetProperties.__webglColorRenderbuffer.length; i ++ ) { if ( renderTargetProperties.__webglColorRenderbuffer[ i ] ) _gl.deleteRenderbuffer( renderTargetProperties.__webglColorRenderbuffer[ i ] ); } } if ( renderTargetProperties.__webglDepthRenderbuffer ) _gl.deleteRenderbuffer( renderTargetProperties.__webglDepthRenderbuffer ); } const textures = renderTarget.textures; for ( let i = 0, il = textures.length; i < il; i ++ ) { const attachmentProperties = properties.get( textures[ i ] ); if ( attachmentProperties.__webglTexture ) { _gl.deleteTexture( attachmentProperties.__webglTexture ); info.memory.textures --; } properties.remove( textures[ i ] ); } properties.remove( renderTarget ); } // let textureUnits = 0; function resetTextureUnits() { textureUnits = 0; } function allocateTextureUnit() { const textureUnit = textureUnits; if ( textureUnit >= capabilities.maxTextures ) { console.warn( 'THREE.WebGLTextures: Trying to use ' + textureUnit + ' texture units while this GPU supports only ' + capabilities.maxTextures ); } textureUnits += 1; return textureUnit; } function getTextureCacheKey( texture ) { const array = []; array.push( texture.wrapS ); array.push( texture.wrapT ); array.push( texture.wrapR || 0 ); array.push( texture.magFilter ); array.push( texture.minFilter ); array.push( texture.anisotropy ); array.push( texture.internalFormat ); array.push( texture.format ); array.push( texture.type ); array.push( texture.generateMipmaps ); array.push( texture.premultiplyAlpha ); array.push( texture.flipY ); array.push( texture.unpackAlignment ); array.push( texture.colorSpace ); return array.join(); } // function setTexture2D( texture, slot ) { const textureProperties = properties.get( texture ); if ( texture.isVideoTexture ) updateVideoTexture( texture ); if ( texture.isRenderTargetTexture === false && texture.version > 0 && textureProperties.__version !== texture.version ) { const image = texture.image; if ( image === null ) { console.warn( 'THREE.WebGLRenderer: Texture marked for update but no image data found.' 
); } else if ( image.complete === false ) { console.warn( 'THREE.WebGLRenderer: Texture marked for update but image is incomplete' ); } else { uploadTexture( textureProperties, texture, slot ); return; } } state.bindTexture( _gl.TEXTURE_2D, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); } function setTexture2DArray( texture, slot ) { const textureProperties = properties.get( texture ); if ( texture.version > 0 && textureProperties.__version !== texture.version ) { uploadTexture( textureProperties, texture, slot ); return; } state.bindTexture( _gl.TEXTURE_2D_ARRAY, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); } function setTexture3D( texture, slot ) { const textureProperties = properties.get( texture ); if ( texture.version > 0 && textureProperties.__version !== texture.version ) { uploadTexture( textureProperties, texture, slot ); return; } state.bindTexture( _gl.TEXTURE_3D, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); } function setTextureCube( texture, slot ) { const textureProperties = properties.get( texture ); if ( texture.version > 0 && textureProperties.__version !== texture.version ) { uploadCubeTexture( textureProperties, texture, slot ); return; } state.bindTexture( _gl.TEXTURE_CUBE_MAP, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); } const wrappingToGL = { [ RepeatWrapping ]: _gl.REPEAT, [ ClampToEdgeWrapping ]: _gl.CLAMP_TO_EDGE, [ MirroredRepeatWrapping ]: _gl.MIRRORED_REPEAT }; const filterToGL = { [ NearestFilter ]: _gl.NEAREST, [ NearestMipmapNearestFilter ]: _gl.NEAREST_MIPMAP_NEAREST, [ NearestMipmapLinearFilter ]: _gl.NEAREST_MIPMAP_LINEAR, [ LinearFilter ]: _gl.LINEAR, [ LinearMipmapNearestFilter ]: _gl.LINEAR_MIPMAP_NEAREST, [ LinearMipmapLinearFilter ]: _gl.LINEAR_MIPMAP_LINEAR }; const compareToGL = { [ NeverCompare ]: _gl.NEVER, [ AlwaysCompare ]: _gl.ALWAYS, [ LessCompare ]: _gl.LESS, [ LessEqualCompare ]: _gl.LEQUAL, [ EqualCompare ]: _gl.EQUAL, [ GreaterEqualCompare ]: _gl.GEQUAL, [ GreaterCompare ]: _gl.GREATER, [ NotEqualCompare ]: _gl.NOTEQUAL }; function setTextureParameters( textureType, texture ) { if ( texture.type === FloatType && extensions.has( 'OES_texture_float_linear' ) === false && ( texture.magFilter === LinearFilter || texture.magFilter === LinearMipmapNearestFilter || texture.magFilter === NearestMipmapLinearFilter || texture.magFilter === LinearMipmapLinearFilter || texture.minFilter === LinearFilter || texture.minFilter === LinearMipmapNearestFilter || texture.minFilter === NearestMipmapLinearFilter || texture.minFilter === LinearMipmapLinearFilter ) ) { console.warn( 'THREE.WebGLRenderer: Unable to use linear filtering with floating point textures. OES_texture_float_linear not supported on this device.' 
); } _gl.texParameteri( textureType, _gl.TEXTURE_WRAP_S, wrappingToGL[ texture.wrapS ] ); _gl.texParameteri( textureType, _gl.TEXTURE_WRAP_T, wrappingToGL[ texture.wrapT ] ); if ( textureType === _gl.TEXTURE_3D || textureType === _gl.TEXTURE_2D_ARRAY ) { _gl.texParameteri( textureType, _gl.TEXTURE_WRAP_R, wrappingToGL[ texture.wrapR ] ); } _gl.texParameteri( textureType, _gl.TEXTURE_MAG_FILTER, filterToGL[ texture.magFilter ] ); _gl.texParameteri( textureType, _gl.TEXTURE_MIN_FILTER, filterToGL[ texture.minFilter ] ); if ( texture.compareFunction ) { _gl.texParameteri( textureType, _gl.TEXTURE_COMPARE_MODE, _gl.COMPARE_REF_TO_TEXTURE ); _gl.texParameteri( textureType, _gl.TEXTURE_COMPARE_FUNC, compareToGL[ texture.compareFunction ] ); } if ( extensions.has( 'EXT_texture_filter_anisotropic' ) === true ) { if ( texture.magFilter === NearestFilter ) return; if ( texture.minFilter !== NearestMipmapLinearFilter && texture.minFilter !== LinearMipmapLinearFilter ) return; if ( texture.type === FloatType && extensions.has( 'OES_texture_float_linear' ) === false ) return; // verify extension if ( texture.anisotropy > 1 || properties.get( texture ).__currentAnisotropy ) { const extension = extensions.get( 'EXT_texture_filter_anisotropic' ); _gl.texParameterf( textureType, extension.TEXTURE_MAX_ANISOTROPY_EXT, Math.min( texture.anisotropy, capabilities.getMaxAnisotropy() ) ); properties.get( texture ).__currentAnisotropy = texture.anisotropy; } } } function initTexture( textureProperties, texture ) { let forceUpload = false; if ( textureProperties.__webglInit === undefined ) { textureProperties.__webglInit = true; texture.addEventListener( 'dispose', onTextureDispose ); } // create Source <-> WebGLTextures mapping if necessary const source = texture.source; let webglTextures = _sources.get( source ); if ( webglTextures === undefined ) { webglTextures = {}; _sources.set( source, webglTextures ); } // check if there is already a WebGLTexture object for the given texture parameters const textureCacheKey = getTextureCacheKey( texture ); if ( textureCacheKey !== textureProperties.__cacheKey ) { // if not, create a new instance of WebGLTexture if ( webglTextures[ textureCacheKey ] === undefined ) { // create new entry webglTextures[ textureCacheKey ] = { texture: _gl.createTexture(), usedTimes: 0 }; info.memory.textures ++; // when a new instance of WebGLTexture was created, a texture upload is required // even if the image contents are identical forceUpload = true; } webglTextures[ textureCacheKey ].usedTimes ++; // every time the texture cache key changes, it's necessary to check if an instance of // WebGLTexture can be deleted in order to avoid a memory leak. 
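// Illustrative note (not part of the upstream flow): the cache key compared above comes from
// getTextureCacheKey(), which concatenates sampler and format state, so textures sharing one
// Source with identical parameters reuse a single WebGLTexture, while a parameter change
// allocates a new entry and ref-counts the old one down. A minimal user-level sketch, assuming
// a hypothetical `texture` that has already been rendered once:
//
//   texture.minFilter = NearestFilter; // changes the key computed by getTextureCacheKey()
//   texture.needsUpdate = true;        // the next upload re-enters this block, decrements the
//                                      // old entry's usedTimes and deletes it once unused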
const webglTexture = webglTextures[ textureProperties.__cacheKey ]; if ( webglTexture !== undefined ) { webglTextures[ textureProperties.__cacheKey ].usedTimes --; if ( webglTexture.usedTimes === 0 ) { deleteTexture( texture ); } } // store references to cache key and WebGLTexture object textureProperties.__cacheKey = textureCacheKey; textureProperties.__webglTexture = webglTextures[ textureCacheKey ].texture; } return forceUpload; } function uploadTexture( textureProperties, texture, slot ) { let textureType = _gl.TEXTURE_2D; if ( texture.isDataArrayTexture || texture.isCompressedArrayTexture ) textureType = _gl.TEXTURE_2D_ARRAY; if ( texture.isData3DTexture ) textureType = _gl.TEXTURE_3D; const forceUpload = initTexture( textureProperties, texture ); const source = texture.source; state.bindTexture( textureType, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); const sourceProperties = properties.get( source ); if ( source.version !== sourceProperties.__version || forceUpload === true ) { state.activeTexture( _gl.TEXTURE0 + slot ); const workingPrimaries = ColorManagement.getPrimaries( ColorManagement.workingColorSpace ); const texturePrimaries = texture.colorSpace === NoColorSpace ? null : ColorManagement.getPrimaries( texture.colorSpace ); const unpackConversion = texture.colorSpace === NoColorSpace || workingPrimaries === texturePrimaries ? _gl.NONE : _gl.BROWSER_DEFAULT_WEBGL; _gl.pixelStorei( _gl.UNPACK_FLIP_Y_WEBGL, texture.flipY ); _gl.pixelStorei( _gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, texture.premultiplyAlpha ); _gl.pixelStorei( _gl.UNPACK_ALIGNMENT, texture.unpackAlignment ); _gl.pixelStorei( _gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, unpackConversion ); let image = resizeImage( texture.image, false, capabilities.maxTextureSize ); image = verifyColorSpace( texture, image ); const glFormat = utils.convert( texture.format, texture.colorSpace ); const glType = utils.convert( texture.type ); let glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace, texture.isVideoTexture ); setTextureParameters( textureType, texture ); let mipmap; const mipmaps = texture.mipmaps; const useTexStorage = ( texture.isVideoTexture !== true ); const allocateMemory = ( sourceProperties.__version === undefined ) || ( forceUpload === true ); const dataReady = source.dataReady; const levels = getMipLevels( texture, image ); if ( texture.isDepthTexture ) { glInternalFormat = getInternalDepthFormat( texture.format === DepthStencilFormat, texture.type ); // if ( allocateMemory ) { if ( useTexStorage ) { state.texStorage2D( _gl.TEXTURE_2D, 1, glInternalFormat, image.width, image.height ); } else { state.texImage2D( _gl.TEXTURE_2D, 0, glInternalFormat, image.width, image.height, 0, glFormat, glType, null ); } } } else if ( texture.isDataTexture ) { // use manually created mipmaps if available // if there are no manual mipmaps // set 0 level mipmap and then use GL to generate other mipmap levels if ( mipmaps.length > 0 ) { if ( useTexStorage && allocateMemory ) { state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, mipmaps[ 0 ].width, mipmaps[ 0 ].height ); } for ( let i = 0, il = mipmaps.length; i < il; i ++ ) { mipmap = mipmaps[ i ]; if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_2D, i, 0, 0, mipmap.width, mipmap.height, glFormat, glType, mipmap.data ); } } else { state.texImage2D( _gl.TEXTURE_2D, i, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data ); } } texture.generateMipmaps = false; } else { if 
( useTexStorage ) { if ( allocateMemory ) { state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, image.width, image.height ); } if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_2D, 0, 0, 0, image.width, image.height, glFormat, glType, image.data ); } } else { state.texImage2D( _gl.TEXTURE_2D, 0, glInternalFormat, image.width, image.height, 0, glFormat, glType, image.data ); } } } else if ( texture.isCompressedTexture ) { if ( texture.isCompressedArrayTexture ) { if ( useTexStorage && allocateMemory ) { state.texStorage3D( _gl.TEXTURE_2D_ARRAY, levels, glInternalFormat, mipmaps[ 0 ].width, mipmaps[ 0 ].height, image.depth ); } for ( let i = 0, il = mipmaps.length; i < il; i ++ ) { mipmap = mipmaps[ i ]; if ( texture.format !== RGBAFormat ) { if ( glFormat !== null ) { if ( useTexStorage ) { if ( dataReady ) { if ( texture.layerUpdates.size > 0 ) { const layerByteLength = getByteLength( mipmap.width, mipmap.height, texture.format, texture.type ); for ( const layerIndex of texture.layerUpdates ) { const layerData = mipmap.data.subarray( layerIndex * layerByteLength / mipmap.data.BYTES_PER_ELEMENT, ( layerIndex + 1 ) * layerByteLength / mipmap.data.BYTES_PER_ELEMENT ); state.compressedTexSubImage3D( _gl.TEXTURE_2D_ARRAY, i, 0, 0, layerIndex, mipmap.width, mipmap.height, 1, glFormat, layerData ); } texture.clearLayerUpdates(); } else { state.compressedTexSubImage3D( _gl.TEXTURE_2D_ARRAY, i, 0, 0, 0, mipmap.width, mipmap.height, image.depth, glFormat, mipmap.data ); } } } else { state.compressedTexImage3D( _gl.TEXTURE_2D_ARRAY, i, glInternalFormat, mipmap.width, mipmap.height, image.depth, 0, mipmap.data, 0, 0 ); } } else { console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); } } else { if ( useTexStorage ) { if ( dataReady ) { state.texSubImage3D( _gl.TEXTURE_2D_ARRAY, i, 0, 0, 0, mipmap.width, mipmap.height, image.depth, glFormat, glType, mipmap.data ); } } else { state.texImage3D( _gl.TEXTURE_2D_ARRAY, i, glInternalFormat, mipmap.width, mipmap.height, image.depth, 0, glFormat, glType, mipmap.data ); } } } } else { if ( useTexStorage && allocateMemory ) { state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, mipmaps[ 0 ].width, mipmaps[ 0 ].height ); } for ( let i = 0, il = mipmaps.length; i < il; i ++ ) { mipmap = mipmaps[ i ]; if ( texture.format !== RGBAFormat ) { if ( glFormat !== null ) { if ( useTexStorage ) { if ( dataReady ) { state.compressedTexSubImage2D( _gl.TEXTURE_2D, i, 0, 0, mipmap.width, mipmap.height, glFormat, mipmap.data ); } } else { state.compressedTexImage2D( _gl.TEXTURE_2D, i, glInternalFormat, mipmap.width, mipmap.height, 0, mipmap.data ); } } else { console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .uploadTexture()' ); } } else { if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_2D, i, 0, 0, mipmap.width, mipmap.height, glFormat, glType, mipmap.data ); } } else { state.texImage2D( _gl.TEXTURE_2D, i, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data ); } } } } } else if ( texture.isDataArrayTexture ) { if ( useTexStorage ) { if ( allocateMemory ) { state.texStorage3D( _gl.TEXTURE_2D_ARRAY, levels, glInternalFormat, image.width, image.height, image.depth ); } if ( dataReady ) { if ( texture.layerUpdates.size > 0 ) { const layerByteLength = getByteLength( image.width, image.height, texture.format, texture.type ); for ( const layerIndex of texture.layerUpdates ) { const layerData = 
image.data.subarray( layerIndex * layerByteLength / image.data.BYTES_PER_ELEMENT, ( layerIndex + 1 ) * layerByteLength / image.data.BYTES_PER_ELEMENT ); state.texSubImage3D( _gl.TEXTURE_2D_ARRAY, 0, 0, 0, layerIndex, image.width, image.height, 1, glFormat, glType, layerData ); } texture.clearLayerUpdates(); } else { state.texSubImage3D( _gl.TEXTURE_2D_ARRAY, 0, 0, 0, 0, image.width, image.height, image.depth, glFormat, glType, image.data ); } } } else { state.texImage3D( _gl.TEXTURE_2D_ARRAY, 0, glInternalFormat, image.width, image.height, image.depth, 0, glFormat, glType, image.data ); } } else if ( texture.isData3DTexture ) { if ( useTexStorage ) { if ( allocateMemory ) { state.texStorage3D( _gl.TEXTURE_3D, levels, glInternalFormat, image.width, image.height, image.depth ); } if ( dataReady ) { state.texSubImage3D( _gl.TEXTURE_3D, 0, 0, 0, 0, image.width, image.height, image.depth, glFormat, glType, image.data ); } } else { state.texImage3D( _gl.TEXTURE_3D, 0, glInternalFormat, image.width, image.height, image.depth, 0, glFormat, glType, image.data ); } } else if ( texture.isFramebufferTexture ) { if ( allocateMemory ) { if ( useTexStorage ) { state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, image.width, image.height ); } else { let width = image.width, height = image.height; for ( let i = 0; i < levels; i ++ ) { state.texImage2D( _gl.TEXTURE_2D, i, glInternalFormat, width, height, 0, glFormat, glType, null ); width >>= 1; height >>= 1; } } } } else { // regular Texture (image, video, canvas) // use manually created mipmaps if available // if there are no manual mipmaps // set 0 level mipmap and then use GL to generate other mipmap levels if ( mipmaps.length > 0 ) { if ( useTexStorage && allocateMemory ) { const dimensions = getDimensions( mipmaps[ 0 ] ); state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, dimensions.width, dimensions.height ); } for ( let i = 0, il = mipmaps.length; i < il; i ++ ) { mipmap = mipmaps[ i ]; if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_2D, i, 0, 0, glFormat, glType, mipmap ); } } else { state.texImage2D( _gl.TEXTURE_2D, i, glInternalFormat, glFormat, glType, mipmap ); } } texture.generateMipmaps = false; } else { if ( useTexStorage ) { if ( allocateMemory ) { const dimensions = getDimensions( image ); state.texStorage2D( _gl.TEXTURE_2D, levels, glInternalFormat, dimensions.width, dimensions.height ); } if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_2D, 0, 0, 0, glFormat, glType, image ); } } else { state.texImage2D( _gl.TEXTURE_2D, 0, glInternalFormat, glFormat, glType, image ); } } } if ( textureNeedsGenerateMipmaps( texture ) ) { generateMipmap( textureType ); } sourceProperties.__version = source.version; if ( texture.onUpdate ) texture.onUpdate( texture ); } textureProperties.__version = texture.version; } function uploadCubeTexture( textureProperties, texture, slot ) { if ( texture.image.length !== 6 ) return; const forceUpload = initTexture( textureProperties, texture ); const source = texture.source; state.bindTexture( _gl.TEXTURE_CUBE_MAP, textureProperties.__webglTexture, _gl.TEXTURE0 + slot ); const sourceProperties = properties.get( source ); if ( source.version !== sourceProperties.__version || forceUpload === true ) { state.activeTexture( _gl.TEXTURE0 + slot ); const workingPrimaries = ColorManagement.getPrimaries( ColorManagement.workingColorSpace ); const texturePrimaries = texture.colorSpace === NoColorSpace ? 
null : ColorManagement.getPrimaries( texture.colorSpace ); const unpackConversion = texture.colorSpace === NoColorSpace || workingPrimaries === texturePrimaries ? _gl.NONE : _gl.BROWSER_DEFAULT_WEBGL; _gl.pixelStorei( _gl.UNPACK_FLIP_Y_WEBGL, texture.flipY ); _gl.pixelStorei( _gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, texture.premultiplyAlpha ); _gl.pixelStorei( _gl.UNPACK_ALIGNMENT, texture.unpackAlignment ); _gl.pixelStorei( _gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, unpackConversion ); const isCompressed = ( texture.isCompressedTexture || texture.image[ 0 ].isCompressedTexture ); const isDataTexture = ( texture.image[ 0 ] && texture.image[ 0 ].isDataTexture ); const cubeImage = []; for ( let i = 0; i < 6; i ++ ) { if ( ! isCompressed && ! isDataTexture ) { cubeImage[ i ] = resizeImage( texture.image[ i ], true, capabilities.maxCubemapSize ); } else { cubeImage[ i ] = isDataTexture ? texture.image[ i ].image : texture.image[ i ]; } cubeImage[ i ] = verifyColorSpace( texture, cubeImage[ i ] ); } const image = cubeImage[ 0 ], glFormat = utils.convert( texture.format, texture.colorSpace ), glType = utils.convert( texture.type ), glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace ); const useTexStorage = ( texture.isVideoTexture !== true ); const allocateMemory = ( sourceProperties.__version === undefined ) || ( forceUpload === true ); const dataReady = source.dataReady; let levels = getMipLevels( texture, image ); setTextureParameters( _gl.TEXTURE_CUBE_MAP, texture ); let mipmaps; if ( isCompressed ) { if ( useTexStorage && allocateMemory ) { state.texStorage2D( _gl.TEXTURE_CUBE_MAP, levels, glInternalFormat, image.width, image.height ); } for ( let i = 0; i < 6; i ++ ) { mipmaps = cubeImage[ i ].mipmaps; for ( let j = 0; j < mipmaps.length; j ++ ) { const mipmap = mipmaps[ j ]; if ( texture.format !== RGBAFormat ) { if ( glFormat !== null ) { if ( useTexStorage ) { if ( dataReady ) { state.compressedTexSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j, 0, 0, mipmap.width, mipmap.height, glFormat, mipmap.data ); } } else { state.compressedTexImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j, glInternalFormat, mipmap.width, mipmap.height, 0, mipmap.data ); } } else { console.warn( 'THREE.WebGLRenderer: Attempt to load unsupported compressed texture format in .setTextureCube()' ); } } else { if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j, 0, 0, mipmap.width, mipmap.height, glFormat, glType, mipmap.data ); } } else { state.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j, glInternalFormat, mipmap.width, mipmap.height, 0, glFormat, glType, mipmap.data ); } } } } } else { mipmaps = texture.mipmaps; if ( useTexStorage && allocateMemory ) { // TODO: Uniformly handle mipmap definitions // Normal textures and compressed cube textures define base level + mips with their mipmap array // Uncompressed cube textures use their mipmap array only for mips (no base level) if ( mipmaps.length > 0 ) levels ++; const dimensions = getDimensions( cubeImage[ 0 ] ); state.texStorage2D( _gl.TEXTURE_CUBE_MAP, levels, glInternalFormat, dimensions.width, dimensions.height ); } for ( let i = 0; i < 6; i ++ ) { if ( isDataTexture ) { if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, 0, 0, cubeImage[ i ].width, cubeImage[ i ].height, glFormat, glType, cubeImage[ i ].data ); } } else { state.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, glInternalFormat, cubeImage[ i 
].width, cubeImage[ i ].height, 0, glFormat, glType, cubeImage[ i ].data ); } for ( let j = 0; j < mipmaps.length; j ++ ) { const mipmap = mipmaps[ j ]; const mipmapImage = mipmap.image[ i ].image; if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j + 1, 0, 0, mipmapImage.width, mipmapImage.height, glFormat, glType, mipmapImage.data ); } } else { state.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j + 1, glInternalFormat, mipmapImage.width, mipmapImage.height, 0, glFormat, glType, mipmapImage.data ); } } } else { if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, 0, 0, glFormat, glType, cubeImage[ i ] ); } } else { state.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, glInternalFormat, glFormat, glType, cubeImage[ i ] ); } for ( let j = 0; j < mipmaps.length; j ++ ) { const mipmap = mipmaps[ j ]; if ( useTexStorage ) { if ( dataReady ) { state.texSubImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j + 1, 0, 0, glFormat, glType, mipmap.image[ i ] ); } } else { state.texImage2D( _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, j + 1, glInternalFormat, glFormat, glType, mipmap.image[ i ] ); } } } } } if ( textureNeedsGenerateMipmaps( texture ) ) { // We assume images for cube map have the same size. generateMipmap( _gl.TEXTURE_CUBE_MAP ); } sourceProperties.__version = source.version; if ( texture.onUpdate ) texture.onUpdate( texture ); } textureProperties.__version = texture.version; } // Render targets // Setup storage for target texture and bind it to correct framebuffer function setupFrameBufferTexture( framebuffer, renderTarget, texture, attachment, textureTarget, level ) { const glFormat = utils.convert( texture.format, texture.colorSpace ); const glType = utils.convert( texture.type ); const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace ); const renderTargetProperties = properties.get( renderTarget ); const textureProperties = properties.get( texture ); textureProperties.__renderTarget = renderTarget; if ( ! 
renderTargetProperties.__hasExternalTextures ) { const width = Math.max( 1, renderTarget.width >> level ); const height = Math.max( 1, renderTarget.height >> level ); if ( textureTarget === _gl.TEXTURE_3D || textureTarget === _gl.TEXTURE_2D_ARRAY ) { state.texImage3D( textureTarget, level, glInternalFormat, width, height, renderTarget.depth, 0, glFormat, glType, null ); } else { state.texImage2D( textureTarget, level, glInternalFormat, width, height, 0, glFormat, glType, null ); } } state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); if ( useMultisampledRTT( renderTarget ) ) { multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, attachment, textureTarget, textureProperties.__webglTexture, 0, getRenderTargetSamples( renderTarget ) ); } else if ( textureTarget === _gl.TEXTURE_2D || ( textureTarget >= _gl.TEXTURE_CUBE_MAP_POSITIVE_X && textureTarget <= _gl.TEXTURE_CUBE_MAP_NEGATIVE_Z ) ) { // see #24753 _gl.framebufferTexture2D( _gl.FRAMEBUFFER, attachment, textureTarget, textureProperties.__webglTexture, level ); } state.bindFramebuffer( _gl.FRAMEBUFFER, null ); } // Setup storage for internal depth/stencil buffers and bind to correct framebuffer function setupRenderBufferStorage( renderbuffer, renderTarget, isMultisample ) { _gl.bindRenderbuffer( _gl.RENDERBUFFER, renderbuffer ); if ( renderTarget.depthBuffer ) { // retrieve the depth attachment types const depthTexture = renderTarget.depthTexture; const depthType = depthTexture && depthTexture.isDepthTexture ? depthTexture.type : null; const glInternalFormat = getInternalDepthFormat( renderTarget.stencilBuffer, depthType ); const glAttachmentType = renderTarget.stencilBuffer ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; // set up the attachment const samples = getRenderTargetSamples( renderTarget ); const isUseMultisampledRTT = useMultisampledRTT( renderTarget ); if ( isUseMultisampledRTT ) { multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); } else if ( isMultisample ) { _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); } else { _gl.renderbufferStorage( _gl.RENDERBUFFER, glInternalFormat, renderTarget.width, renderTarget.height ); } _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, glAttachmentType, _gl.RENDERBUFFER, renderbuffer ); } else { const textures = renderTarget.textures; for ( let i = 0; i < textures.length; i ++ ) { const texture = textures[ i ]; const glFormat = utils.convert( texture.format, texture.colorSpace ); const glType = utils.convert( texture.type ); const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace ); const samples = getRenderTargetSamples( renderTarget ); if ( isMultisample && useMultisampledRTT( renderTarget ) === false ) { _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); } else if ( useMultisampledRTT( renderTarget ) ) { multisampledRTTExt.renderbufferStorageMultisampleEXT( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); } else { _gl.renderbufferStorage( _gl.RENDERBUFFER, glInternalFormat, renderTarget.width, renderTarget.height ); } } } _gl.bindRenderbuffer( _gl.RENDERBUFFER, null ); } // Setup resources for a Depth Texture for a FBO (needs an extension) function setupDepthTexture( framebuffer, renderTarget ) { const isCube = ( renderTarget && 
renderTarget.isWebGLCubeRenderTarget ); if ( isCube ) throw new Error( 'Depth Texture with cube render targets is not supported' ); state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); if ( ! ( renderTarget.depthTexture && renderTarget.depthTexture.isDepthTexture ) ) { throw new Error( 'renderTarget.depthTexture must be an instance of THREE.DepthTexture' ); } const textureProperties = properties.get( renderTarget.depthTexture ); textureProperties.__renderTarget = renderTarget; // upload an empty depth texture with framebuffer size if ( ! textureProperties.__webglTexture || renderTarget.depthTexture.image.width !== renderTarget.width || renderTarget.depthTexture.image.height !== renderTarget.height ) { renderTarget.depthTexture.image.width = renderTarget.width; renderTarget.depthTexture.image.height = renderTarget.height; renderTarget.depthTexture.needsUpdate = true; } setTexture2D( renderTarget.depthTexture, 0 ); const webglDepthTexture = textureProperties.__webglTexture; const samples = getRenderTargetSamples( renderTarget ); if ( renderTarget.depthTexture.format === DepthFormat ) { if ( useMultisampledRTT( renderTarget ) ) { multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, _gl.DEPTH_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0, samples ); } else { _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.DEPTH_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0 ); } } else if ( renderTarget.depthTexture.format === DepthStencilFormat ) { if ( useMultisampledRTT( renderTarget ) ) { multisampledRTTExt.framebufferTexture2DMultisampleEXT( _gl.FRAMEBUFFER, _gl.DEPTH_STENCIL_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0, samples ); } else { _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.DEPTH_STENCIL_ATTACHMENT, _gl.TEXTURE_2D, webglDepthTexture, 0 ); } } else { throw new Error( 'Unknown depthTexture format' ); } } // Setup GL resources for a non-texture depth buffer function setupDepthRenderbuffer( renderTarget ) { const renderTargetProperties = properties.get( renderTarget ); const isCube = ( renderTarget.isWebGLCubeRenderTarget === true ); // if the bound depth texture has changed if ( renderTargetProperties.__boundDepthTexture !== renderTarget.depthTexture ) { // fire the dispose event to get rid of stored state associated with the previously bound depth buffer const depthTexture = renderTarget.depthTexture; if ( renderTargetProperties.__depthDisposeCallback ) { renderTargetProperties.__depthDisposeCallback(); } // set up dispose listeners to track when the currently attached buffer is implicitly unbound if ( depthTexture ) { const disposeEvent = () => { delete renderTargetProperties.__boundDepthTexture; delete renderTargetProperties.__depthDisposeCallback; depthTexture.removeEventListener( 'dispose', disposeEvent ); }; depthTexture.addEventListener( 'dispose', disposeEvent ); renderTargetProperties.__depthDisposeCallback = disposeEvent; } renderTargetProperties.__boundDepthTexture = depthTexture; } if ( renderTarget.depthTexture && ! 
renderTargetProperties.__autoAllocateDepthBuffer ) { if ( isCube ) throw new Error( 'target.depthTexture not supported in Cube render targets' ); setupDepthTexture( renderTargetProperties.__webglFramebuffer, renderTarget ); } else { if ( isCube ) { renderTargetProperties.__webglDepthbuffer = []; for ( let i = 0; i < 6; i ++ ) { state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer[ i ] ); if ( renderTargetProperties.__webglDepthbuffer[ i ] === undefined ) { renderTargetProperties.__webglDepthbuffer[ i ] = _gl.createRenderbuffer(); setupRenderBufferStorage( renderTargetProperties.__webglDepthbuffer[ i ], renderTarget, false ); } else { // attach buffer if it's been created already const glAttachmentType = renderTarget.stencilBuffer ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; const renderbuffer = renderTargetProperties.__webglDepthbuffer[ i ]; _gl.bindRenderbuffer( _gl.RENDERBUFFER, renderbuffer ); _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, glAttachmentType, _gl.RENDERBUFFER, renderbuffer ); } } } else { state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer ); if ( renderTargetProperties.__webglDepthbuffer === undefined ) { renderTargetProperties.__webglDepthbuffer = _gl.createRenderbuffer(); setupRenderBufferStorage( renderTargetProperties.__webglDepthbuffer, renderTarget, false ); } else { // attach buffer if it's been created already const glAttachmentType = renderTarget.stencilBuffer ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; const renderbuffer = renderTargetProperties.__webglDepthbuffer; _gl.bindRenderbuffer( _gl.RENDERBUFFER, renderbuffer ); _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, glAttachmentType, _gl.RENDERBUFFER, renderbuffer ); } } } state.bindFramebuffer( _gl.FRAMEBUFFER, null ); } // rebind framebuffer with external textures function rebindTextures( renderTarget, colorTexture, depthTexture ) { const renderTargetProperties = properties.get( renderTarget ); if ( colorTexture !== undefined ) { setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer, renderTarget, renderTarget.texture, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_2D, 0 ); } if ( depthTexture !== undefined ) { setupDepthRenderbuffer( renderTarget ); } } // Set up GL resources for the render target function setupRenderTarget( renderTarget ) { const texture = renderTarget.texture; const renderTargetProperties = properties.get( renderTarget ); const textureProperties = properties.get( texture ); renderTarget.addEventListener( 'dispose', onRenderTargetDispose ); const textures = renderTarget.textures; const isCube = ( renderTarget.isWebGLCubeRenderTarget === true ); const isMultipleRenderTargets = ( textures.length > 1 ); if ( ! 
isMultipleRenderTargets ) { if ( textureProperties.__webglTexture === undefined ) { textureProperties.__webglTexture = _gl.createTexture(); } textureProperties.__version = texture.version; info.memory.textures ++; } // Setup framebuffer if ( isCube ) { renderTargetProperties.__webglFramebuffer = []; for ( let i = 0; i < 6; i ++ ) { if ( texture.mipmaps && texture.mipmaps.length > 0 ) { renderTargetProperties.__webglFramebuffer[ i ] = []; for ( let level = 0; level < texture.mipmaps.length; level ++ ) { renderTargetProperties.__webglFramebuffer[ i ][ level ] = _gl.createFramebuffer(); } } else { renderTargetProperties.__webglFramebuffer[ i ] = _gl.createFramebuffer(); } } } else { if ( texture.mipmaps && texture.mipmaps.length > 0 ) { renderTargetProperties.__webglFramebuffer = []; for ( let level = 0; level < texture.mipmaps.length; level ++ ) { renderTargetProperties.__webglFramebuffer[ level ] = _gl.createFramebuffer(); } } else { renderTargetProperties.__webglFramebuffer = _gl.createFramebuffer(); } if ( isMultipleRenderTargets ) { for ( let i = 0, il = textures.length; i < il; i ++ ) { const attachmentProperties = properties.get( textures[ i ] ); if ( attachmentProperties.__webglTexture === undefined ) { attachmentProperties.__webglTexture = _gl.createTexture(); info.memory.textures ++; } } } if ( ( renderTarget.samples > 0 ) && useMultisampledRTT( renderTarget ) === false ) { renderTargetProperties.__webglMultisampledFramebuffer = _gl.createFramebuffer(); renderTargetProperties.__webglColorRenderbuffer = []; state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglMultisampledFramebuffer ); for ( let i = 0; i < textures.length; i ++ ) { const texture = textures[ i ]; renderTargetProperties.__webglColorRenderbuffer[ i ] = _gl.createRenderbuffer(); _gl.bindRenderbuffer( _gl.RENDERBUFFER, renderTargetProperties.__webglColorRenderbuffer[ i ] ); const glFormat = utils.convert( texture.format, texture.colorSpace ); const glType = utils.convert( texture.type ); const glInternalFormat = getInternalFormat( texture.internalFormat, glFormat, glType, texture.colorSpace, renderTarget.isXRRenderTarget === true ); const samples = getRenderTargetSamples( renderTarget ); _gl.renderbufferStorageMultisample( _gl.RENDERBUFFER, samples, glInternalFormat, renderTarget.width, renderTarget.height ); _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0 + i, _gl.RENDERBUFFER, renderTargetProperties.__webglColorRenderbuffer[ i ] ); } _gl.bindRenderbuffer( _gl.RENDERBUFFER, null ); if ( renderTarget.depthBuffer ) { renderTargetProperties.__webglDepthRenderbuffer = _gl.createRenderbuffer(); setupRenderBufferStorage( renderTargetProperties.__webglDepthRenderbuffer, renderTarget, true ); } state.bindFramebuffer( _gl.FRAMEBUFFER, null ); } } // Setup color buffer if ( isCube ) { state.bindTexture( _gl.TEXTURE_CUBE_MAP, textureProperties.__webglTexture ); setTextureParameters( _gl.TEXTURE_CUBE_MAP, texture ); for ( let i = 0; i < 6; i ++ ) { if ( texture.mipmaps && texture.mipmaps.length > 0 ) { for ( let level = 0; level < texture.mipmaps.length; level ++ ) { setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer[ i ][ level ], renderTarget, texture, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, level ); } } else { setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer[ i ], renderTarget, texture, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0 ); } } if ( textureNeedsGenerateMipmaps( texture ) ) { generateMipmap( _gl.TEXTURE_CUBE_MAP ); 
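// Note: this call builds the initial mip chain for the cube color texture; updateRenderTargetMipmap()
// further below regenerates it for targets whose texture has generateMipmaps enabled. As an
// illustrative user-level sketch (assumed configuration, not part of this module), this branch is
// reached for a target such as:
//
//   const cubeRT = new WebGLCubeRenderTarget( 256, { generateMipmaps: true, minFilter: LinearMipmapLinearFilter } );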
} state.unbindTexture(); } else if ( isMultipleRenderTargets ) { for ( let i = 0, il = textures.length; i < il; i ++ ) { const attachment = textures[ i ]; const attachmentProperties = properties.get( attachment ); state.bindTexture( _gl.TEXTURE_2D, attachmentProperties.__webglTexture ); setTextureParameters( _gl.TEXTURE_2D, attachment ); setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer, renderTarget, attachment, _gl.COLOR_ATTACHMENT0 + i, _gl.TEXTURE_2D, 0 ); if ( textureNeedsGenerateMipmaps( attachment ) ) { generateMipmap( _gl.TEXTURE_2D ); } } state.unbindTexture(); } else { let glTextureType = _gl.TEXTURE_2D; if ( renderTarget.isWebGL3DRenderTarget || renderTarget.isWebGLArrayRenderTarget ) { glTextureType = renderTarget.isWebGL3DRenderTarget ? _gl.TEXTURE_3D : _gl.TEXTURE_2D_ARRAY; } state.bindTexture( glTextureType, textureProperties.__webglTexture ); setTextureParameters( glTextureType, texture ); if ( texture.mipmaps && texture.mipmaps.length > 0 ) { for ( let level = 0; level < texture.mipmaps.length; level ++ ) { setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer[ level ], renderTarget, texture, _gl.COLOR_ATTACHMENT0, glTextureType, level ); } } else { setupFrameBufferTexture( renderTargetProperties.__webglFramebuffer, renderTarget, texture, _gl.COLOR_ATTACHMENT0, glTextureType, 0 ); } if ( textureNeedsGenerateMipmaps( texture ) ) { generateMipmap( glTextureType ); } state.unbindTexture(); } // Setup depth and stencil buffers if ( renderTarget.depthBuffer ) { setupDepthRenderbuffer( renderTarget ); } } function updateRenderTargetMipmap( renderTarget ) { const textures = renderTarget.textures; for ( let i = 0, il = textures.length; i < il; i ++ ) { const texture = textures[ i ]; if ( textureNeedsGenerateMipmaps( texture ) ) { const targetType = getTargetType( renderTarget ); const webglTexture = properties.get( texture ).__webglTexture; state.bindTexture( targetType, webglTexture ); generateMipmap( targetType ); state.unbindTexture(); } } } const invalidationArrayRead = []; const invalidationArrayDraw = []; function updateMultisampleRenderTarget( renderTarget ) { if ( renderTarget.samples > 0 ) { if ( useMultisampledRTT( renderTarget ) === false ) { const textures = renderTarget.textures; const width = renderTarget.width; const height = renderTarget.height; let mask = _gl.COLOR_BUFFER_BIT; const depthStyle = renderTarget.stencilBuffer ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; const renderTargetProperties = properties.get( renderTarget ); const isMultipleRenderTargets = ( textures.length > 1 ); // If MRT we need to remove FBO attachments if ( isMultipleRenderTargets ) { for ( let i = 0; i < textures.length; i ++ ) { state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglMultisampledFramebuffer ); _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0 + i, _gl.RENDERBUFFER, null ); state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer ); _gl.framebufferTexture2D( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0 + i, _gl.TEXTURE_2D, null, 0 ); } } state.bindFramebuffer( _gl.READ_FRAMEBUFFER, renderTargetProperties.__webglMultisampledFramebuffer ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, renderTargetProperties.__webglFramebuffer ); for ( let i = 0; i < textures.length; i ++ ) { if ( renderTarget.resolveDepthBuffer ) { if ( renderTarget.depthBuffer ) mask |= _gl.DEPTH_BUFFER_BIT; // resolving stencil is slow with a D3D backend. 
disable it for all transmission render targets (see #27799) if ( renderTarget.stencilBuffer && renderTarget.resolveStencilBuffer ) mask |= _gl.STENCIL_BUFFER_BIT; } if ( isMultipleRenderTargets ) { _gl.framebufferRenderbuffer( _gl.READ_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.RENDERBUFFER, renderTargetProperties.__webglColorRenderbuffer[ i ] ); const webglTexture = properties.get( textures[ i ] ).__webglTexture; _gl.framebufferTexture2D( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_2D, webglTexture, 0 ); } _gl.blitFramebuffer( 0, 0, width, height, 0, 0, width, height, mask, _gl.NEAREST ); if ( supportsInvalidateFramebuffer === true ) { invalidationArrayRead.length = 0; invalidationArrayDraw.length = 0; invalidationArrayRead.push( _gl.COLOR_ATTACHMENT0 + i ); if ( renderTarget.depthBuffer && renderTarget.resolveDepthBuffer === false ) { invalidationArrayRead.push( depthStyle ); invalidationArrayDraw.push( depthStyle ); _gl.invalidateFramebuffer( _gl.DRAW_FRAMEBUFFER, invalidationArrayDraw ); } _gl.invalidateFramebuffer( _gl.READ_FRAMEBUFFER, invalidationArrayRead ); } } state.bindFramebuffer( _gl.READ_FRAMEBUFFER, null ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, null ); // If MRT since pre-blit we removed the FBO we need to reconstruct the attachments if ( isMultipleRenderTargets ) { for ( let i = 0; i < textures.length; i ++ ) { state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglMultisampledFramebuffer ); _gl.framebufferRenderbuffer( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0 + i, _gl.RENDERBUFFER, renderTargetProperties.__webglColorRenderbuffer[ i ] ); const webglTexture = properties.get( textures[ i ] ).__webglTexture; state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer ); _gl.framebufferTexture2D( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0 + i, _gl.TEXTURE_2D, webglTexture, 0 ); } } state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, renderTargetProperties.__webglMultisampledFramebuffer ); } else { if ( renderTarget.depthBuffer && renderTarget.resolveDepthBuffer === false && supportsInvalidateFramebuffer ) { const depthStyle = renderTarget.stencilBuffer ? _gl.DEPTH_STENCIL_ATTACHMENT : _gl.DEPTH_ATTACHMENT; _gl.invalidateFramebuffer( _gl.DRAW_FRAMEBUFFER, [ depthStyle ] ); } } } } function getRenderTargetSamples( renderTarget ) { return Math.min( capabilities.maxSamples, renderTarget.samples ); } function useMultisampledRTT( renderTarget ) { const renderTargetProperties = properties.get( renderTarget ); return renderTarget.samples > 0 && extensions.has( 'WEBGL_multisampled_render_to_texture' ) === true && renderTargetProperties.__useRenderToTexture !== false; } function updateVideoTexture( texture ) { const frame = info.render.frame; // Check the last frame we updated the VideoTexture if ( _videoTextures.get( texture ) !== frame ) { _videoTextures.set( texture, frame ); texture.update(); } } function verifyColorSpace( texture, image ) { const colorSpace = texture.colorSpace; const format = texture.format; const type = texture.type; if ( texture.isCompressedTexture === true || texture.isVideoTexture === true ) return image; if ( colorSpace !== LinearSRGBColorSpace && colorSpace !== NoColorSpace ) { // sRGB if ( ColorManagement.getTransfer( colorSpace ) === SRGBTransfer ) { // in WebGL 2 uncompressed textures can only be sRGB encoded if they have the RGBA8 format if ( format !== RGBAFormat || type !== UnsignedByteType ) { console.warn( 'THREE.WebGLTextures: sRGB encoded textures have to use RGBAFormat and UnsignedByteType.' 
); } } else { console.error( 'THREE.WebGLTextures: Unsupported texture color space:', colorSpace ); } } return image; } function getDimensions( image ) { if ( typeof HTMLImageElement !== 'undefined' && image instanceof HTMLImageElement ) { // if intrinsic data are not available, fallback to width/height _imageDimensions.width = image.naturalWidth || image.width; _imageDimensions.height = image.naturalHeight || image.height; } else if ( typeof VideoFrame !== 'undefined' && image instanceof VideoFrame ) { _imageDimensions.width = image.displayWidth; _imageDimensions.height = image.displayHeight; } else { _imageDimensions.width = image.width; _imageDimensions.height = image.height; } return _imageDimensions; } // this.allocateTextureUnit = allocateTextureUnit; this.resetTextureUnits = resetTextureUnits; this.setTexture2D = setTexture2D; this.setTexture2DArray = setTexture2DArray; this.setTexture3D = setTexture3D; this.setTextureCube = setTextureCube; this.rebindTextures = rebindTextures; this.setupRenderTarget = setupRenderTarget; this.updateRenderTargetMipmap = updateRenderTargetMipmap; this.updateMultisampleRenderTarget = updateMultisampleRenderTarget; this.setupDepthRenderbuffer = setupDepthRenderbuffer; this.setupFrameBufferTexture = setupFrameBufferTexture; this.useMultisampledRTT = useMultisampledRTT; } function WebGLUtils( gl, extensions ) { function convert( p, colorSpace = NoColorSpace ) { let extension; const transfer = ColorManagement.getTransfer( colorSpace ); if ( p === UnsignedByteType ) return gl.UNSIGNED_BYTE; if ( p === UnsignedShort4444Type ) return gl.UNSIGNED_SHORT_4_4_4_4; if ( p === UnsignedShort5551Type ) return gl.UNSIGNED_SHORT_5_5_5_1; if ( p === UnsignedInt5999Type ) return gl.UNSIGNED_INT_5_9_9_9_REV; if ( p === ByteType ) return gl.BYTE; if ( p === ShortType ) return gl.SHORT; if ( p === UnsignedShortType ) return gl.UNSIGNED_SHORT; if ( p === IntType ) return gl.INT; if ( p === UnsignedIntType ) return gl.UNSIGNED_INT; if ( p === FloatType ) return gl.FLOAT; if ( p === HalfFloatType ) return gl.HALF_FLOAT; if ( p === AlphaFormat ) return gl.ALPHA; if ( p === RGBFormat ) return gl.RGB; if ( p === RGBAFormat ) return gl.RGBA; if ( p === LuminanceFormat ) return gl.LUMINANCE; if ( p === LuminanceAlphaFormat ) return gl.LUMINANCE_ALPHA; if ( p === DepthFormat ) return gl.DEPTH_COMPONENT; if ( p === DepthStencilFormat ) return gl.DEPTH_STENCIL; // WebGL2 formats. 
if ( p === RedFormat ) return gl.RED; if ( p === RedIntegerFormat ) return gl.RED_INTEGER; if ( p === RGFormat ) return gl.RG; if ( p === RGIntegerFormat ) return gl.RG_INTEGER; if ( p === RGBAIntegerFormat ) return gl.RGBA_INTEGER; // S3TC if ( p === RGB_S3TC_DXT1_Format || p === RGBA_S3TC_DXT1_Format || p === RGBA_S3TC_DXT3_Format || p === RGBA_S3TC_DXT5_Format ) { if ( transfer === SRGBTransfer ) { extension = extensions.get( 'WEBGL_compressed_texture_s3tc_srgb' ); if ( extension !== null ) { if ( p === RGB_S3TC_DXT1_Format ) return extension.COMPRESSED_SRGB_S3TC_DXT1_EXT; if ( p === RGBA_S3TC_DXT1_Format ) return extension.COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT; if ( p === RGBA_S3TC_DXT3_Format ) return extension.COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT; if ( p === RGBA_S3TC_DXT5_Format ) return extension.COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT; } else { return null; } } else { extension = extensions.get( 'WEBGL_compressed_texture_s3tc' ); if ( extension !== null ) { if ( p === RGB_S3TC_DXT1_Format ) return extension.COMPRESSED_RGB_S3TC_DXT1_EXT; if ( p === RGBA_S3TC_DXT1_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT1_EXT; if ( p === RGBA_S3TC_DXT3_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT3_EXT; if ( p === RGBA_S3TC_DXT5_Format ) return extension.COMPRESSED_RGBA_S3TC_DXT5_EXT; } else { return null; } } } // PVRTC if ( p === RGB_PVRTC_4BPPV1_Format || p === RGB_PVRTC_2BPPV1_Format || p === RGBA_PVRTC_4BPPV1_Format || p === RGBA_PVRTC_2BPPV1_Format ) { extension = extensions.get( 'WEBGL_compressed_texture_pvrtc' ); if ( extension !== null ) { if ( p === RGB_PVRTC_4BPPV1_Format ) return extension.COMPRESSED_RGB_PVRTC_4BPPV1_IMG; if ( p === RGB_PVRTC_2BPPV1_Format ) return extension.COMPRESSED_RGB_PVRTC_2BPPV1_IMG; if ( p === RGBA_PVRTC_4BPPV1_Format ) return extension.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; if ( p === RGBA_PVRTC_2BPPV1_Format ) return extension.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG; } else { return null; } } // ETC if ( p === RGB_ETC1_Format || p === RGB_ETC2_Format || p === RGBA_ETC2_EAC_Format ) { extension = extensions.get( 'WEBGL_compressed_texture_etc' ); if ( extension !== null ) { if ( p === RGB_ETC1_Format || p === RGB_ETC2_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ETC2 : extension.COMPRESSED_RGB8_ETC2; if ( p === RGBA_ETC2_EAC_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ETC2_EAC : extension.COMPRESSED_RGBA8_ETC2_EAC; } else { return null; } } // ASTC if ( p === RGBA_ASTC_4x4_Format || p === RGBA_ASTC_5x4_Format || p === RGBA_ASTC_5x5_Format || p === RGBA_ASTC_6x5_Format || p === RGBA_ASTC_6x6_Format || p === RGBA_ASTC_8x5_Format || p === RGBA_ASTC_8x6_Format || p === RGBA_ASTC_8x8_Format || p === RGBA_ASTC_10x5_Format || p === RGBA_ASTC_10x6_Format || p === RGBA_ASTC_10x8_Format || p === RGBA_ASTC_10x10_Format || p === RGBA_ASTC_12x10_Format || p === RGBA_ASTC_12x12_Format ) { extension = extensions.get( 'WEBGL_compressed_texture_astc' ); if ( extension !== null ) { if ( p === RGBA_ASTC_4x4_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR : extension.COMPRESSED_RGBA_ASTC_4x4_KHR; if ( p === RGBA_ASTC_5x4_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR : extension.COMPRESSED_RGBA_ASTC_5x4_KHR; if ( p === RGBA_ASTC_5x5_Format ) return ( transfer === SRGBTransfer ) ? 
extension.COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR : extension.COMPRESSED_RGBA_ASTC_5x5_KHR;
				if ( p === RGBA_ASTC_6x5_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR : extension.COMPRESSED_RGBA_ASTC_6x5_KHR;
				if ( p === RGBA_ASTC_6x6_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR : extension.COMPRESSED_RGBA_ASTC_6x6_KHR;
				if ( p === RGBA_ASTC_8x5_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR : extension.COMPRESSED_RGBA_ASTC_8x5_KHR;
				if ( p === RGBA_ASTC_8x6_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR : extension.COMPRESSED_RGBA_ASTC_8x6_KHR;
				if ( p === RGBA_ASTC_8x8_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR : extension.COMPRESSED_RGBA_ASTC_8x8_KHR;
				if ( p === RGBA_ASTC_10x5_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR : extension.COMPRESSED_RGBA_ASTC_10x5_KHR;
				if ( p === RGBA_ASTC_10x6_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR : extension.COMPRESSED_RGBA_ASTC_10x6_KHR;
				if ( p === RGBA_ASTC_10x8_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR : extension.COMPRESSED_RGBA_ASTC_10x8_KHR;
				if ( p === RGBA_ASTC_10x10_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR : extension.COMPRESSED_RGBA_ASTC_10x10_KHR;
				if ( p === RGBA_ASTC_12x10_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR : extension.COMPRESSED_RGBA_ASTC_12x10_KHR;
				if ( p === RGBA_ASTC_12x12_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR : extension.COMPRESSED_RGBA_ASTC_12x12_KHR;

			} else {

				return null;

			}

		}

		// BPTC

		if ( p === RGBA_BPTC_Format || p === RGB_BPTC_SIGNED_Format || p === RGB_BPTC_UNSIGNED_Format ) {

			extension = extensions.get( 'EXT_texture_compression_bptc' );

			if ( extension !== null ) {

				if ( p === RGBA_BPTC_Format ) return ( transfer === SRGBTransfer ) ? extension.COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT : extension.COMPRESSED_RGBA_BPTC_UNORM_EXT;
				if ( p === RGB_BPTC_SIGNED_Format ) return extension.COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT;
				if ( p === RGB_BPTC_UNSIGNED_Format ) return extension.COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT;

			} else {

				return null;

			}

		}

		// RGTC

		if ( p === RED_RGTC1_Format || p === SIGNED_RED_RGTC1_Format || p === RED_GREEN_RGTC2_Format || p === SIGNED_RED_GREEN_RGTC2_Format ) {

			extension = extensions.get( 'EXT_texture_compression_rgtc' );

			if ( extension !== null ) {

				if ( p === RED_RGTC1_Format ) return extension.COMPRESSED_RED_RGTC1_EXT;
				if ( p === SIGNED_RED_RGTC1_Format ) return extension.COMPRESSED_SIGNED_RED_RGTC1_EXT;
				if ( p === RED_GREEN_RGTC2_Format ) return extension.COMPRESSED_RED_GREEN_RGTC2_EXT;
				if ( p === SIGNED_RED_GREEN_RGTC2_Format ) return extension.COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT;

			} else {

				return null;

			}

		}

		//

		if ( p === UnsignedInt248Type ) return gl.UNSIGNED_INT_24_8;

		// if "p" can't be resolved, assume the user defines a WebGL constant as a string (fallback/workaround for packed RGB formats)

		return ( gl[ p ] !== undefined ) ?
gl[ p ] : null;

	}

	return { convert: convert };

}

const _occlusion_vertex = ` void main() { gl_Position = vec4( position, 1.0 ); }`;

const _occlusion_fragment = ` uniform sampler2DArray depthColor; uniform float depthWidth; uniform float depthHeight; void main() { vec2 coord = vec2( gl_FragCoord.x / depthWidth, gl_FragCoord.y / depthHeight ); if ( coord.x >= 1.0 ) { gl_FragDepth = texture( depthColor, vec3( coord.x - 1.0, coord.y, 1 ) ).r; } else { gl_FragDepth = texture( depthColor, vec3( coord.x, coord.y, 0 ) ).r; } }`;

/**
 * An XR module that manages the access to the Depth Sensing API.
 */
class WebXRDepthSensing {

	/**
	 * Constructs a new depth sensing module.
	 */
	constructor() {

		/**
		 * A texture representing the depth of the user's environment.
		 *
		 * @type {?Texture}
		 */
		this.texture = null;

		/**
		 * A plane mesh for visualizing the depth texture.
		 *
		 * @type {?Mesh}
		 */
		this.mesh = null;

		/**
		 * The depth near value.
		 *
		 * @type {number}
		 */
		this.depthNear = 0;

		/**
		 * The depth far value.
		 *
		 * @type {number}
		 */
		this.depthFar = 0;

	}

	/**
	 * Initializes the depth sensing module.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 * @param {XRWebGLDepthInformation} depthData - The XR depth data.
	 * @param {XRRenderState} renderState - The XR render state.
	 */
	init( renderer, depthData, renderState ) {

		if ( this.texture === null ) {

			const texture = new Texture();

			const texProps = renderer.properties.get( texture );
			texProps.__webglTexture = depthData.texture;

			if ( ( depthData.depthNear !== renderState.depthNear ) || ( depthData.depthFar !== renderState.depthFar ) ) {

				this.depthNear = depthData.depthNear;
				this.depthFar = depthData.depthFar;

			}

			this.texture = texture;

		}

	}

	/**
	 * Returns a plane mesh that visualizes the depth texture.
	 *
	 * @param {ArrayCamera} cameraXR - The XR camera.
	 * @return {?Mesh} The plane mesh.
	 */
	getMesh( cameraXR ) {

		if ( this.texture !== null ) {

			if ( this.mesh === null ) {

				const viewport = cameraXR.cameras[ 0 ].viewport;
				const material = new ShaderMaterial( {
					vertexShader: _occlusion_vertex,
					fragmentShader: _occlusion_fragment,
					uniforms: {
						depthColor: { value: this.texture },
						depthWidth: { value: viewport.z },
						depthHeight: { value: viewport.w }
					}
				} );

				this.mesh = new Mesh( new PlaneGeometry( 20, 20 ), material );

			}

		}

		return this.mesh;

	}

	/**
	 * Resets the module.
	 */
	reset() {

		this.texture = null;
		this.mesh = null;

	}

	/**
	 * Returns a texture representing the depth of the user's environment.
	 *
	 * @return {?Texture} The depth texture.
	 */
	getDepthTexture() {

		return this.texture;

	}

}

/**
 * This class represents an abstraction of the WebXR Device API and is
 * internally used by {@link WebGLRenderer}. `WebXRManager` also provides a public
 * interface that allows users to enable/disable XR and perform XR related
 * tasks such as retrieving controllers.
 *
 * @augments EventDispatcher
 * @hideconstructor
 */
class WebXRManager extends EventDispatcher {

	/**
	 * Constructs a new WebXR manager.
	 *
	 * @param {WebGLRenderer} renderer - The renderer.
	 * @param {WebGL2RenderingContext} gl - The rendering context.
	 */
	constructor( renderer, gl ) {

		super();

		const scope = this;

		let session = null;

		let framebufferScaleFactor = 1.0;

		let referenceSpace = null;
		let referenceSpaceType = 'local-floor';

		// Set default foveation to maximum.
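		/*
		 * Illustrative app-level flow that feeds this manager (helpers such as
		 * VRButton and ARButton from the three.js addons wrap the same calls):
		 *
		 *   renderer.xr.enabled = true;
		 *
		 *   const session = await navigator.xr.requestSession( 'immersive-vr', {
		 *   	optionalFeatures: [ 'local-floor', 'hand-tracking' ]
		 *   } );
		 *
		 *   await renderer.xr.setSession( session );
		 */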
let foveation = 1.0; let customReferenceSpace = null; let pose = null; let glBinding = null; let glProjLayer = null; let glBaseLayer = null; let xrFrame = null; const depthSensing = new WebXRDepthSensing(); const attributes = gl.getContextAttributes(); let initialRenderTarget = null; let newRenderTarget = null; const controllers = []; const controllerInputSources = []; const currentSize = new Vector2(); let currentPixelRatio = null; // const cameraL = new PerspectiveCamera(); cameraL.viewport = new Vector4(); const cameraR = new PerspectiveCamera(); cameraR.viewport = new Vector4(); const cameras = [ cameraL, cameraR ]; const cameraXR = new ArrayCamera(); let _currentDepthNear = null; let _currentDepthFar = null; // /** * Whether the manager's XR camera should be automatically updated or not. * * @type {boolean} * @default true */ this.cameraAutoUpdate = true; /** * This flag notifies the renderer to be ready for XR rendering. Set it to `true` * if you are going to use XR in your app. * * @type {boolean} * @default false */ this.enabled = false; /** * Whether XR presentation is active or not. * * @type {boolean} * @readonly * @default false */ this.isPresenting = false; /** * Returns a group representing the `target ray` space of the XR controller. * Use this space for visualizing 3D objects that support the user in pointing * tasks like UI interaction. * * @param {number} index - The index of the controller. * @return {Group} A group representing the `target ray` space. */ this.getController = function ( index ) { let controller = controllers[ index ]; if ( controller === undefined ) { controller = new WebXRController(); controllers[ index ] = controller; } return controller.getTargetRaySpace(); }; /** * Returns a group representing the `grip` space of the XR controller. * Use this space for visualizing 3D objects that support the user in pointing * tasks like UI interaction. * * Note: If you want to show something in the user's hand AND offer a * pointing ray at the same time, you'll want to attached the handheld object * to the group returned by `getControllerGrip()` and the ray to the * group returned by `getController()`. The idea is to have two * different groups in two different coordinate spaces for the same WebXR * controller. * * @param {number} index - The index of the controller. * @return {Group} A group representing the `grip` space. */ this.getControllerGrip = function ( index ) { let controller = controllers[ index ]; if ( controller === undefined ) { controller = new WebXRController(); controllers[ index ] = controller; } return controller.getGripSpace(); }; /** * Returns a group representing the `hand` space of the XR controller. * Use this space for visualizing 3D objects that support the user in pointing * tasks like UI interaction. * * @param {number} index - The index of the controller. * @return {Group} A group representing the `hand` space. 
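		 *
		 * A minimal usage sketch (assumes an XR session requested with the
		 * `hand-tracking` feature and that `scene` is the application's scene):
		 * ```js
		 * const hand = renderer.xr.getHand( 0 );
		 * scene.add( hand );
		 *
		 * // joints such as hand.joints[ 'index-finger-tip' ] become available
		 * // once hand tracking data is delivered by the session
		 * ```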
*/ this.getHand = function ( index ) { let controller = controllers[ index ]; if ( controller === undefined ) { controller = new WebXRController(); controllers[ index ] = controller; } return controller.getHandSpace(); }; // function onSessionEvent( event ) { const controllerIndex = controllerInputSources.indexOf( event.inputSource ); if ( controllerIndex === -1 ) { return; } const controller = controllers[ controllerIndex ]; if ( controller !== undefined ) { controller.update( event.inputSource, event.frame, customReferenceSpace || referenceSpace ); controller.dispatchEvent( { type: event.type, data: event.inputSource } ); } } function onSessionEnd() { session.removeEventListener( 'select', onSessionEvent ); session.removeEventListener( 'selectstart', onSessionEvent ); session.removeEventListener( 'selectend', onSessionEvent ); session.removeEventListener( 'squeeze', onSessionEvent ); session.removeEventListener( 'squeezestart', onSessionEvent ); session.removeEventListener( 'squeezeend', onSessionEvent ); session.removeEventListener( 'end', onSessionEnd ); session.removeEventListener( 'inputsourceschange', onInputSourcesChange ); for ( let i = 0; i < controllers.length; i ++ ) { const inputSource = controllerInputSources[ i ]; if ( inputSource === null ) continue; controllerInputSources[ i ] = null; controllers[ i ].disconnect( inputSource ); } _currentDepthNear = null; _currentDepthFar = null; depthSensing.reset(); // restore framebuffer/rendering state renderer.setRenderTarget( initialRenderTarget ); glBaseLayer = null; glProjLayer = null; glBinding = null; session = null; newRenderTarget = null; // animation.stop(); scope.isPresenting = false; renderer.setPixelRatio( currentPixelRatio ); renderer.setSize( currentSize.width, currentSize.height, false ); scope.dispatchEvent( { type: 'sessionend' } ); } /** * Sets the framebuffer scale factor. * * This method can not be used during a XR session. * * @param {number} value - The framebuffer scale factor. */ this.setFramebufferScaleFactor = function ( value ) { framebufferScaleFactor = value; if ( scope.isPresenting === true ) { console.warn( 'THREE.WebXRManager: Cannot change framebuffer scale while presenting.' ); } }; /** * Sets the reference space type. Can be used to configure a spatial relationship with the user's physical * environment. Depending on how the user moves in 3D space, setting an appropriate reference space can * improve tracking. Default is `local-floor`. * * This method can not be used during a XR session. * * @param {string} value - The reference space type. */ this.setReferenceSpaceType = function ( value ) { referenceSpaceType = value; if ( scope.isPresenting === true ) { console.warn( 'THREE.WebXRManager: Cannot change reference space type while presenting.' ); } }; /** * Returns the XR reference space. * * @return {XRReferenceSpace} The XR reference space. */ this.getReferenceSpace = function () { return customReferenceSpace || referenceSpace; }; /** * Sets a custom XR reference space. * * @param {XRReferenceSpace} space - The XR reference space. */ this.setReferenceSpace = function ( space ) { customReferenceSpace = space; }; /** * Returns the current base layer. * * @return {?(XRWebGLLayer|XRProjectionLayer)} The XR base layer. */ this.getBaseLayer = function () { return glProjLayer !== null ? glProjLayer : glBaseLayer; }; /** * Returns the current XR binding. * * @return {?XRWebGLBinding} The XR binding. */ this.getBinding = function () { return glBinding; }; /** * Returns the current XR frame. 
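		 *
		 * A minimal usage sketch (assumes an active XR session; `scene` and `camera`
		 * are the application's own objects):
		 * ```js
		 * renderer.setAnimationLoop( ( time ) => {
		 *
		 * 	const frame = renderer.xr.getFrame();
		 *
		 * 	if ( frame !== null ) {
		 *
		 * 		const pose = frame.getViewerPose( renderer.xr.getReferenceSpace() );
		 * 		// pose, if available, holds the viewer transform for this frame
		 *
		 * 	}
		 *
		 * 	renderer.render( scene, camera );
		 *
		 * } );
		 * ```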
* * @return {?XRFrame} The XR frame. Returns `null` when used outside a XR session. */ this.getFrame = function () { return xrFrame; }; /** * Returns the current XR session. * * @return {?XRSession} The XR session. Returns `null` when used outside a XR session. */ this.getSession = function () { return session; }; /** * After a XR session has been requested usually with one of the `*Button` modules, it * is injected into the renderer with this method. This method triggers the start of * the actual XR rendering. * * @async * @param {XRSession} value - The XR session to set. * @return {Promise} A Promise that resolves when the session has been set. */ this.setSession = async function ( value ) { session = value; if ( session !== null ) { initialRenderTarget = renderer.getRenderTarget(); session.addEventListener( 'select', onSessionEvent ); session.addEventListener( 'selectstart', onSessionEvent ); session.addEventListener( 'selectend', onSessionEvent ); session.addEventListener( 'squeeze', onSessionEvent ); session.addEventListener( 'squeezestart', onSessionEvent ); session.addEventListener( 'squeezeend', onSessionEvent ); session.addEventListener( 'end', onSessionEnd ); session.addEventListener( 'inputsourceschange', onInputSourcesChange ); if ( attributes.xrCompatible !== true ) { await gl.makeXRCompatible(); } currentPixelRatio = renderer.getPixelRatio(); renderer.getSize( currentSize ); // Check that the browser implements the necessary APIs to use an // XRProjectionLayer rather than an XRWebGLLayer const useLayers = typeof XRWebGLBinding !== 'undefined' && 'createProjectionLayer' in XRWebGLBinding.prototype; if ( ! useLayers ) { const layerInit = { antialias: attributes.antialias, alpha: true, depth: attributes.depth, stencil: attributes.stencil, framebufferScaleFactor: framebufferScaleFactor }; glBaseLayer = new XRWebGLLayer( session, gl, layerInit ); session.updateRenderState( { baseLayer: glBaseLayer } ); renderer.setPixelRatio( 1 ); renderer.setSize( glBaseLayer.framebufferWidth, glBaseLayer.framebufferHeight, false ); newRenderTarget = new WebGLRenderTarget( glBaseLayer.framebufferWidth, glBaseLayer.framebufferHeight, { format: RGBAFormat, type: UnsignedByteType, colorSpace: renderer.outputColorSpace, stencilBuffer: attributes.stencil, resolveDepthBuffer: ( glBaseLayer.ignoreDepthValues === false ), resolveStencilBuffer: ( glBaseLayer.ignoreDepthValues === false ) } ); } else { let depthFormat = null; let depthType = null; let glDepthFormat = null; if ( attributes.depth ) { glDepthFormat = attributes.stencil ? gl.DEPTH24_STENCIL8 : gl.DEPTH_COMPONENT24; depthFormat = attributes.stencil ? DepthStencilFormat : DepthFormat; depthType = attributes.stencil ? 
UnsignedInt248Type : UnsignedIntType; } const projectionlayerInit = { colorFormat: gl.RGBA8, depthFormat: glDepthFormat, scaleFactor: framebufferScaleFactor }; glBinding = new XRWebGLBinding( session, gl ); glProjLayer = glBinding.createProjectionLayer( projectionlayerInit ); session.updateRenderState( { layers: [ glProjLayer ] } ); renderer.setPixelRatio( 1 ); renderer.setSize( glProjLayer.textureWidth, glProjLayer.textureHeight, false ); newRenderTarget = new WebGLRenderTarget( glProjLayer.textureWidth, glProjLayer.textureHeight, { format: RGBAFormat, type: UnsignedByteType, depthTexture: new DepthTexture( glProjLayer.textureWidth, glProjLayer.textureHeight, depthType, undefined, undefined, undefined, undefined, undefined, undefined, depthFormat ), stencilBuffer: attributes.stencil, colorSpace: renderer.outputColorSpace, samples: attributes.antialias ? 4 : 0, resolveDepthBuffer: ( glProjLayer.ignoreDepthValues === false ), resolveStencilBuffer: ( glProjLayer.ignoreDepthValues === false ) } ); } newRenderTarget.isXRRenderTarget = true; // TODO Remove this when possible, see #23278 this.setFoveation( foveation ); customReferenceSpace = null; referenceSpace = await session.requestReferenceSpace( referenceSpaceType ); animation.setContext( session ); animation.start(); scope.isPresenting = true; scope.dispatchEvent( { type: 'sessionstart' } ); } }; /** * Returns the environment blend mode from the current XR session. * * @return {'opaque'|'additive'|'alpha-blend'|undefined} The environment blend mode. Returns `undefined` when used outside of a XR session. */ this.getEnvironmentBlendMode = function () { if ( session !== null ) { return session.environmentBlendMode; } }; /** * Returns the current depth texture computed via depth sensing. * * @return {?Texture} The depth texture. */ this.getDepthTexture = function () { return depthSensing.getDepthTexture(); }; function onInputSourcesChange( event ) { // Notify disconnected for ( let i = 0; i < event.removed.length; i ++ ) { const inputSource = event.removed[ i ]; const index = controllerInputSources.indexOf( inputSource ); if ( index >= 0 ) { controllerInputSources[ index ] = null; controllers[ index ].disconnect( inputSource ); } } // Notify connected for ( let i = 0; i < event.added.length; i ++ ) { const inputSource = event.added[ i ]; let controllerIndex = controllerInputSources.indexOf( inputSource ); if ( controllerIndex === -1 ) { // Assign input source a controller that currently has no input source for ( let i = 0; i < controllers.length; i ++ ) { if ( i >= controllerInputSources.length ) { controllerInputSources.push( inputSource ); controllerIndex = i; break; } else if ( controllerInputSources[ i ] === null ) { controllerInputSources[ i ] = inputSource; controllerIndex = i; break; } } // If all controllers do currently receive input we ignore new ones if ( controllerIndex === -1 ) break; } const controller = controllers[ controllerIndex ]; if ( controller ) { controller.connect( inputSource ); } } } // const cameraLPos = new Vector3(); const cameraRPos = new Vector3(); /** * Assumes 2 cameras that are parallel and share an X-axis, and that * the cameras' projection and world matrices have already been set. * And that near and far planes are identical for both cameras. * Visualization of this technique: https://computergraphics.stackexchange.com/a/4765 * * @param {ArrayCamera} camera - The camera to update. * @param {PerspectiveCamera} cameraL - The left camera. * @param {PerspectiveCamera} cameraR - The right camera. 
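		 *
		 * For reference, the frustum values recovered below from the column-major
		 * projection matrix elements `p[]` are (left eye unless noted):
		 * ```
		 * near      = p[ 14 ] / ( p[ 10 ] - 1 )
		 * far       = p[ 14 ] / ( p[ 10 ] + 1 )
		 * topFov    = ( p[ 9 ] + 1 ) / p[ 5 ]
		 * bottomFov = ( p[ 9 ] - 1 ) / p[ 5 ]
		 * leftFov   = ( p[ 8 ] - 1 ) / p[ 0 ]
		 * rightFov  = ( p[ 8 ] + 1 ) / p[ 0 ]   (right eye)
		 * ```
		 * The fov values are tangents of the half angles; multiplying them by `near`
		 * gives the frustum plane extents used to build the union projection.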
*/ function setProjectionFromUnion( camera, cameraL, cameraR ) { cameraLPos.setFromMatrixPosition( cameraL.matrixWorld ); cameraRPos.setFromMatrixPosition( cameraR.matrixWorld ); const ipd = cameraLPos.distanceTo( cameraRPos ); const projL = cameraL.projectionMatrix.elements; const projR = cameraR.projectionMatrix.elements; // VR systems will have identical far and near planes, and // most likely identical top and bottom frustum extents. // Use the left camera for these values. const near = projL[ 14 ] / ( projL[ 10 ] - 1 ); const far = projL[ 14 ] / ( projL[ 10 ] + 1 ); const topFov = ( projL[ 9 ] + 1 ) / projL[ 5 ]; const bottomFov = ( projL[ 9 ] - 1 ) / projL[ 5 ]; const leftFov = ( projL[ 8 ] - 1 ) / projL[ 0 ]; const rightFov = ( projR[ 8 ] + 1 ) / projR[ 0 ]; const left = near * leftFov; const right = near * rightFov; // Calculate the new camera's position offset from the // left camera. xOffset should be roughly half `ipd`. const zOffset = ipd / ( - leftFov + rightFov ); const xOffset = zOffset * - leftFov; // TODO: Better way to apply this offset? cameraL.matrixWorld.decompose( camera.position, camera.quaternion, camera.scale ); camera.translateX( xOffset ); camera.translateZ( zOffset ); camera.matrixWorld.compose( camera.position, camera.quaternion, camera.scale ); camera.matrixWorldInverse.copy( camera.matrixWorld ).invert(); // Check if the projection uses an infinite far plane. if ( projL[ 10 ] === -1 ) { // Use the projection matrix from the left eye. // The camera offset is sufficient to include the view volumes // of both eyes (assuming symmetric projections). camera.projectionMatrix.copy( cameraL.projectionMatrix ); camera.projectionMatrixInverse.copy( cameraL.projectionMatrixInverse ); } else { // Find the union of the frustum values of the cameras and scale // the values so that the near plane's position does not change in world space, // although must now be relative to the new union camera. const near2 = near + zOffset; const far2 = far + zOffset; const left2 = left - xOffset; const right2 = right + ( ipd - xOffset ); const top2 = topFov * far / far2 * near2; const bottom2 = bottomFov * far / far2 * near2; camera.projectionMatrix.makePerspective( left2, right2, top2, bottom2, near2, far2 ); camera.projectionMatrixInverse.copy( camera.projectionMatrix ).invert(); } } function updateCamera( camera, parent ) { if ( parent === null ) { camera.matrixWorld.copy( camera.matrix ); } else { camera.matrixWorld.multiplyMatrices( parent.matrixWorld, camera.matrix ); } camera.matrixWorldInverse.copy( camera.matrixWorld ).invert(); } /** * Updates the state of the XR camera. Use this method on app level if you * set cameraAutoUpdate` to `false`. The method requires the non-XR * camera of the scene as a parameter. The passed in camera's transformation * is automatically adjusted to the position of the XR camera when calling * this method. * * @param {Camera} camera - The camera. */ this.updateCamera = function ( camera ) { if ( session === null ) return; let depthNear = camera.near; let depthFar = camera.far; if ( depthSensing.texture !== null ) { if ( depthSensing.depthNear > 0 ) depthNear = depthSensing.depthNear; if ( depthSensing.depthFar > 0 ) depthFar = depthSensing.depthFar; } cameraXR.near = cameraR.near = cameraL.near = depthNear; cameraXR.far = cameraR.far = cameraL.far = depthFar; if ( _currentDepthNear !== cameraXR.near || _currentDepthFar !== cameraXR.far ) { // Note that the new renderState won't apply until the next frame. 
See #18320 session.updateRenderState( { depthNear: cameraXR.near, depthFar: cameraXR.far } ); _currentDepthNear = cameraXR.near; _currentDepthFar = cameraXR.far; } cameraL.layers.mask = camera.layers.mask | 0b010; cameraR.layers.mask = camera.layers.mask | 0b100; cameraXR.layers.mask = cameraL.layers.mask | cameraR.layers.mask; const parent = camera.parent; const cameras = cameraXR.cameras; updateCamera( cameraXR, parent ); for ( let i = 0; i < cameras.length; i ++ ) { updateCamera( cameras[ i ], parent ); } // update projection matrix for proper view frustum culling if ( cameras.length === 2 ) { setProjectionFromUnion( cameraXR, cameraL, cameraR ); } else { // assume single camera setup (AR) cameraXR.projectionMatrix.copy( cameraL.projectionMatrix ); } // update user camera and its children updateUserCamera( camera, cameraXR, parent ); }; function updateUserCamera( camera, cameraXR, parent ) { if ( parent === null ) { camera.matrix.copy( cameraXR.matrixWorld ); } else { camera.matrix.copy( parent.matrixWorld ); camera.matrix.invert(); camera.matrix.multiply( cameraXR.matrixWorld ); } camera.matrix.decompose( camera.position, camera.quaternion, camera.scale ); camera.updateMatrixWorld( true ); camera.projectionMatrix.copy( cameraXR.projectionMatrix ); camera.projectionMatrixInverse.copy( cameraXR.projectionMatrixInverse ); if ( camera.isPerspectiveCamera ) { camera.fov = RAD2DEG * 2 * Math.atan( 1 / camera.projectionMatrix.elements[ 5 ] ); camera.zoom = 1; } } /** * Returns an instance of {@link ArrayCamera} which represents the XR camera * of the active XR session. For each view it holds a separate camera object. * * The camera's `fov` is currently not used and does not reflect the fov of * the XR camera. If you need the fov on app level, you have to compute in * manually from the XR camera's projection matrices. * * @return {ArrayCamera} The XR camera. */ this.getCamera = function () { return cameraXR; }; /** * Returns the amount of foveation used by the XR compositor for the projection layer. * * @return {number} The amount of foveation. */ this.getFoveation = function () { if ( glProjLayer === null && glBaseLayer === null ) { return undefined; } return foveation; }; /** * Sets the foveation value. * * @param {number} value - A number in the range `[0,1]` where `0` means no foveation (full resolution) * and `1` means maximum foveation (the edges render at lower resolution). */ this.setFoveation = function ( value ) { // 0 = no foveation = full resolution // 1 = maximum foveation = the edges render at lower resolution foveation = value; if ( glProjLayer !== null ) { glProjLayer.fixedFoveation = value; } if ( glBaseLayer !== null && glBaseLayer.fixedFoveation !== undefined ) { glBaseLayer.fixedFoveation = value; } }; /** * Returns `true` if depth sensing is supported. * * @return {boolean} Whether depth sensing is supported or not. */ this.hasDepthSensing = function () { return depthSensing.texture !== null; }; /** * Returns the depth sensing mesh. * * @return {Mesh} The depth sensing mesh. 
*/ this.getDepthSensingMesh = function () { return depthSensing.getMesh( cameraXR ); }; // Animation Loop let onAnimationFrameCallback = null; function onAnimationFrame( time, frame ) { pose = frame.getViewerPose( customReferenceSpace || referenceSpace ); xrFrame = frame; if ( pose !== null ) { const views = pose.views; if ( glBaseLayer !== null ) { renderer.setRenderTargetFramebuffer( newRenderTarget, glBaseLayer.framebuffer ); renderer.setRenderTarget( newRenderTarget ); } let cameraXRNeedsUpdate = false; // check if it's necessary to rebuild cameraXR's camera list if ( views.length !== cameraXR.cameras.length ) { cameraXR.cameras.length = 0; cameraXRNeedsUpdate = true; } for ( let i = 0; i < views.length; i ++ ) { const view = views[ i ]; let viewport = null; if ( glBaseLayer !== null ) { viewport = glBaseLayer.getViewport( view ); } else { const glSubImage = glBinding.getViewSubImage( glProjLayer, view ); viewport = glSubImage.viewport; // For side-by-side projection, we only produce a single texture for both eyes. if ( i === 0 ) { renderer.setRenderTargetTextures( newRenderTarget, glSubImage.colorTexture, glSubImage.depthStencilTexture ); renderer.setRenderTarget( newRenderTarget ); } } let camera = cameras[ i ]; if ( camera === undefined ) { camera = new PerspectiveCamera(); camera.layers.enable( i ); camera.viewport = new Vector4(); cameras[ i ] = camera; } camera.matrix.fromArray( view.transform.matrix ); camera.matrix.decompose( camera.position, camera.quaternion, camera.scale ); camera.projectionMatrix.fromArray( view.projectionMatrix ); camera.projectionMatrixInverse.copy( camera.projectionMatrix ).invert(); camera.viewport.set( viewport.x, viewport.y, viewport.width, viewport.height ); if ( i === 0 ) { cameraXR.matrix.copy( camera.matrix ); cameraXR.matrix.decompose( cameraXR.position, cameraXR.quaternion, cameraXR.scale ); } if ( cameraXRNeedsUpdate === true ) { cameraXR.cameras.push( camera ); } } // const enabledFeatures = session.enabledFeatures; const gpuDepthSensingEnabled = enabledFeatures && enabledFeatures.includes( 'depth-sensing' ) && session.depthUsage == 'gpu-optimized'; if ( gpuDepthSensingEnabled && glBinding ) { const depthData = glBinding.getDepthInformation( views[ 0 ] ); if ( depthData && depthData.isValid && depthData.texture ) { depthSensing.init( renderer, depthData, session.renderState ); } } } // for ( let i = 0; i < controllers.length; i ++ ) { const inputSource = controllerInputSources[ i ]; const controller = controllers[ i ]; if ( inputSource !== null && controller !== undefined ) { controller.update( inputSource, frame, customReferenceSpace || referenceSpace ); } } if ( onAnimationFrameCallback ) onAnimationFrameCallback( time, frame ); if ( frame.detectedPlanes ) { scope.dispatchEvent( { type: 'planesdetected', data: frame } ); } xrFrame = null; } const animation = new WebGLAnimation(); animation.setAnimationLoop( onAnimationFrame ); this.setAnimationLoop = function ( callback ) { onAnimationFrameCallback = callback; }; this.dispose = function () {}; } } const _e1 = /*@__PURE__*/ new Euler(); const _m1 = /*@__PURE__*/ new Matrix4(); function WebGLMaterials( renderer, properties ) { function refreshTransformUniform( map, uniform ) { if ( map.matrixAutoUpdate === true ) { map.updateMatrix(); } uniform.value.copy( map.matrix ); } function refreshFogUniforms( uniforms, fog ) { fog.color.getRGB( uniforms.fogColor.value, getUnlitUniformColorSpace( renderer ) ); if ( fog.isFog ) { uniforms.fogNear.value = fog.near; uniforms.fogFar.value = fog.far; } else 
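		/*
		 * App-level setup that drives the two branches handled here (illustrative;
		 * `scene` is the application's scene):
		 *
		 *   scene.fog = new Fog( 0xcccccc, 10, 100 );    // linear fog: uses fogNear / fogFar
		 *   scene.fog = new FogExp2( 0xcccccc, 0.002 );  // exponential fog: uses fogDensity
		 */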
if ( fog.isFogExp2 ) { uniforms.fogDensity.value = fog.density; } } function refreshMaterialUniforms( uniforms, material, pixelRatio, height, transmissionRenderTarget ) { if ( material.isMeshBasicMaterial ) { refreshUniformsCommon( uniforms, material ); } else if ( material.isMeshLambertMaterial ) { refreshUniformsCommon( uniforms, material ); } else if ( material.isMeshToonMaterial ) { refreshUniformsCommon( uniforms, material ); refreshUniformsToon( uniforms, material ); } else if ( material.isMeshPhongMaterial ) { refreshUniformsCommon( uniforms, material ); refreshUniformsPhong( uniforms, material ); } else if ( material.isMeshStandardMaterial ) { refreshUniformsCommon( uniforms, material ); refreshUniformsStandard( uniforms, material ); if ( material.isMeshPhysicalMaterial ) { refreshUniformsPhysical( uniforms, material, transmissionRenderTarget ); } } else if ( material.isMeshMatcapMaterial ) { refreshUniformsCommon( uniforms, material ); refreshUniformsMatcap( uniforms, material ); } else if ( material.isMeshDepthMaterial ) { refreshUniformsCommon( uniforms, material ); } else if ( material.isMeshDistanceMaterial ) { refreshUniformsCommon( uniforms, material ); refreshUniformsDistance( uniforms, material ); } else if ( material.isMeshNormalMaterial ) { refreshUniformsCommon( uniforms, material ); } else if ( material.isLineBasicMaterial ) { refreshUniformsLine( uniforms, material ); if ( material.isLineDashedMaterial ) { refreshUniformsDash( uniforms, material ); } } else if ( material.isPointsMaterial ) { refreshUniformsPoints( uniforms, material, pixelRatio, height ); } else if ( material.isSpriteMaterial ) { refreshUniformsSprites( uniforms, material ); } else if ( material.isShadowMaterial ) { uniforms.color.value.copy( material.color ); uniforms.opacity.value = material.opacity; } else if ( material.isShaderMaterial ) { material.uniformsNeedUpdate = false; // #15581 } } function refreshUniformsCommon( uniforms, material ) { uniforms.opacity.value = material.opacity; if ( material.color ) { uniforms.diffuse.value.copy( material.color ); } if ( material.emissive ) { uniforms.emissive.value.copy( material.emissive ).multiplyScalar( material.emissiveIntensity ); } if ( material.map ) { uniforms.map.value = material.map; refreshTransformUniform( material.map, uniforms.mapTransform ); } if ( material.alphaMap ) { uniforms.alphaMap.value = material.alphaMap; refreshTransformUniform( material.alphaMap, uniforms.alphaMapTransform ); } if ( material.bumpMap ) { uniforms.bumpMap.value = material.bumpMap; refreshTransformUniform( material.bumpMap, uniforms.bumpMapTransform ); uniforms.bumpScale.value = material.bumpScale; if ( material.side === BackSide ) { uniforms.bumpScale.value *= -1; } } if ( material.normalMap ) { uniforms.normalMap.value = material.normalMap; refreshTransformUniform( material.normalMap, uniforms.normalMapTransform ); uniforms.normalScale.value.copy( material.normalScale ); if ( material.side === BackSide ) { uniforms.normalScale.value.negate(); } } if ( material.displacementMap ) { uniforms.displacementMap.value = material.displacementMap; refreshTransformUniform( material.displacementMap, uniforms.displacementMapTransform ); uniforms.displacementScale.value = material.displacementScale; uniforms.displacementBias.value = material.displacementBias; } if ( material.emissiveMap ) { uniforms.emissiveMap.value = material.emissiveMap; refreshTransformUniform( material.emissiveMap, uniforms.emissiveMapTransform ); } if ( material.specularMap ) { uniforms.specularMap.value 
= material.specularMap; refreshTransformUniform( material.specularMap, uniforms.specularMapTransform ); } if ( material.alphaTest > 0 ) { uniforms.alphaTest.value = material.alphaTest; } const materialProperties = properties.get( material ); const envMap = materialProperties.envMap; const envMapRotation = materialProperties.envMapRotation; if ( envMap ) { uniforms.envMap.value = envMap; _e1.copy( envMapRotation ); // accommodate left-handed frame _e1.x *= -1; _e1.y *= -1; _e1.z *= -1; if ( envMap.isCubeTexture && envMap.isRenderTargetTexture === false ) { // environment maps which are not cube render targets or PMREMs follow a different convention _e1.y *= -1; _e1.z *= -1; } uniforms.envMapRotation.value.setFromMatrix4( _m1.makeRotationFromEuler( _e1 ) ); uniforms.flipEnvMap.value = ( envMap.isCubeTexture && envMap.isRenderTargetTexture === false ) ? -1 : 1; uniforms.reflectivity.value = material.reflectivity; uniforms.ior.value = material.ior; uniforms.refractionRatio.value = material.refractionRatio; } if ( material.lightMap ) { uniforms.lightMap.value = material.lightMap; uniforms.lightMapIntensity.value = material.lightMapIntensity; refreshTransformUniform( material.lightMap, uniforms.lightMapTransform ); } if ( material.aoMap ) { uniforms.aoMap.value = material.aoMap; uniforms.aoMapIntensity.value = material.aoMapIntensity; refreshTransformUniform( material.aoMap, uniforms.aoMapTransform ); } } function refreshUniformsLine( uniforms, material ) { uniforms.diffuse.value.copy( material.color ); uniforms.opacity.value = material.opacity; if ( material.map ) { uniforms.map.value = material.map; refreshTransformUniform( material.map, uniforms.mapTransform ); } } function refreshUniformsDash( uniforms, material ) { uniforms.dashSize.value = material.dashSize; uniforms.totalSize.value = material.dashSize + material.gapSize; uniforms.scale.value = material.scale; } function refreshUniformsPoints( uniforms, material, pixelRatio, height ) { uniforms.diffuse.value.copy( material.color ); uniforms.opacity.value = material.opacity; uniforms.size.value = material.size * pixelRatio; uniforms.scale.value = height * 0.5; if ( material.map ) { uniforms.map.value = material.map; refreshTransformUniform( material.map, uniforms.uvTransform ); } if ( material.alphaMap ) { uniforms.alphaMap.value = material.alphaMap; refreshTransformUniform( material.alphaMap, uniforms.alphaMapTransform ); } if ( material.alphaTest > 0 ) { uniforms.alphaTest.value = material.alphaTest; } } function refreshUniformsSprites( uniforms, material ) { uniforms.diffuse.value.copy( material.color ); uniforms.opacity.value = material.opacity; uniforms.rotation.value = material.rotation; if ( material.map ) { uniforms.map.value = material.map; refreshTransformUniform( material.map, uniforms.mapTransform ); } if ( material.alphaMap ) { uniforms.alphaMap.value = material.alphaMap; refreshTransformUniform( material.alphaMap, uniforms.alphaMapTransform ); } if ( material.alphaTest > 0 ) { uniforms.alphaTest.value = material.alphaTest; } } function refreshUniformsPhong( uniforms, material ) { uniforms.specular.value.copy( material.specular ); uniforms.shininess.value = Math.max( material.shininess, 1e-4 ); // to prevent pow( 0.0, 0.0 ) } function refreshUniformsToon( uniforms, material ) { if ( material.gradientMap ) { uniforms.gradientMap.value = material.gradientMap; } } function refreshUniformsStandard( uniforms, material ) { uniforms.metalness.value = material.metalness; if ( material.metalnessMap ) { uniforms.metalnessMap.value = 
material.metalnessMap; refreshTransformUniform( material.metalnessMap, uniforms.metalnessMapTransform ); } uniforms.roughness.value = material.roughness; if ( material.roughnessMap ) { uniforms.roughnessMap.value = material.roughnessMap; refreshTransformUniform( material.roughnessMap, uniforms.roughnessMapTransform ); } if ( material.envMap ) { //uniforms.envMap.value = material.envMap; // part of uniforms common uniforms.envMapIntensity.value = material.envMapIntensity; } } function refreshUniformsPhysical( uniforms, material, transmissionRenderTarget ) { uniforms.ior.value = material.ior; // also part of uniforms common if ( material.sheen > 0 ) { uniforms.sheenColor.value.copy( material.sheenColor ).multiplyScalar( material.sheen ); uniforms.sheenRoughness.value = material.sheenRoughness; if ( material.sheenColorMap ) { uniforms.sheenColorMap.value = material.sheenColorMap; refreshTransformUniform( material.sheenColorMap, uniforms.sheenColorMapTransform ); } if ( material.sheenRoughnessMap ) { uniforms.sheenRoughnessMap.value = material.sheenRoughnessMap; refreshTransformUniform( material.sheenRoughnessMap, uniforms.sheenRoughnessMapTransform ); } } if ( material.clearcoat > 0 ) { uniforms.clearcoat.value = material.clearcoat; uniforms.clearcoatRoughness.value = material.clearcoatRoughness; if ( material.clearcoatMap ) { uniforms.clearcoatMap.value = material.clearcoatMap; refreshTransformUniform( material.clearcoatMap, uniforms.clearcoatMapTransform ); } if ( material.clearcoatRoughnessMap ) { uniforms.clearcoatRoughnessMap.value = material.clearcoatRoughnessMap; refreshTransformUniform( material.clearcoatRoughnessMap, uniforms.clearcoatRoughnessMapTransform ); } if ( material.clearcoatNormalMap ) { uniforms.clearcoatNormalMap.value = material.clearcoatNormalMap; refreshTransformUniform( material.clearcoatNormalMap, uniforms.clearcoatNormalMapTransform ); uniforms.clearcoatNormalScale.value.copy( material.clearcoatNormalScale ); if ( material.side === BackSide ) { uniforms.clearcoatNormalScale.value.negate(); } } } if ( material.dispersion > 0 ) { uniforms.dispersion.value = material.dispersion; } if ( material.iridescence > 0 ) { uniforms.iridescence.value = material.iridescence; uniforms.iridescenceIOR.value = material.iridescenceIOR; uniforms.iridescenceThicknessMinimum.value = material.iridescenceThicknessRange[ 0 ]; uniforms.iridescenceThicknessMaximum.value = material.iridescenceThicknessRange[ 1 ]; if ( material.iridescenceMap ) { uniforms.iridescenceMap.value = material.iridescenceMap; refreshTransformUniform( material.iridescenceMap, uniforms.iridescenceMapTransform ); } if ( material.iridescenceThicknessMap ) { uniforms.iridescenceThicknessMap.value = material.iridescenceThicknessMap; refreshTransformUniform( material.iridescenceThicknessMap, uniforms.iridescenceThicknessMapTransform ); } } if ( material.transmission > 0 ) { uniforms.transmission.value = material.transmission; uniforms.transmissionSamplerMap.value = transmissionRenderTarget.texture; uniforms.transmissionSamplerSize.value.set( transmissionRenderTarget.width, transmissionRenderTarget.height ); if ( material.transmissionMap ) { uniforms.transmissionMap.value = material.transmissionMap; refreshTransformUniform( material.transmissionMap, uniforms.transmissionMapTransform ); } uniforms.thickness.value = material.thickness; if ( material.thicknessMap ) { uniforms.thicknessMap.value = material.thicknessMap; refreshTransformUniform( material.thicknessMap, uniforms.thicknessMapTransform ); } 
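			/*
			 * Illustrative material setup that exercises this transmission branch
			 * (an app-level sketch, not used by the renderer itself):
			 *
			 *   const glass = new MeshPhysicalMaterial( {
			 *   	transmission: 1.0,
			 *   	thickness: 0.5,
			 *   	roughness: 0.05,
			 *   	attenuationColor: new Color( 0xffeedd ),
			 *   	attenuationDistance: 1.0
			 *   } );
			 */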
uniforms.attenuationDistance.value = material.attenuationDistance; uniforms.attenuationColor.value.copy( material.attenuationColor ); } if ( material.anisotropy > 0 ) { uniforms.anisotropyVector.value.set( material.anisotropy * Math.cos( material.anisotropyRotation ), material.anisotropy * Math.sin( material.anisotropyRotation ) ); if ( material.anisotropyMap ) { uniforms.anisotropyMap.value = material.anisotropyMap; refreshTransformUniform( material.anisotropyMap, uniforms.anisotropyMapTransform ); } } uniforms.specularIntensity.value = material.specularIntensity; uniforms.specularColor.value.copy( material.specularColor ); if ( material.specularColorMap ) { uniforms.specularColorMap.value = material.specularColorMap; refreshTransformUniform( material.specularColorMap, uniforms.specularColorMapTransform ); } if ( material.specularIntensityMap ) { uniforms.specularIntensityMap.value = material.specularIntensityMap; refreshTransformUniform( material.specularIntensityMap, uniforms.specularIntensityMapTransform ); } } function refreshUniformsMatcap( uniforms, material ) { if ( material.matcap ) { uniforms.matcap.value = material.matcap; } } function refreshUniformsDistance( uniforms, material ) { const light = properties.get( material ).light; uniforms.referencePosition.value.setFromMatrixPosition( light.matrixWorld ); uniforms.nearDistance.value = light.shadow.camera.near; uniforms.farDistance.value = light.shadow.camera.far; } return { refreshFogUniforms: refreshFogUniforms, refreshMaterialUniforms: refreshMaterialUniforms }; } function WebGLUniformsGroups( gl, info, capabilities, state ) { let buffers = {}; let updateList = {}; let allocatedBindingPoints = []; const maxBindingPoints = gl.getParameter( gl.MAX_UNIFORM_BUFFER_BINDINGS ); // binding points are global whereas block indices are per shader program function bind( uniformsGroup, program ) { const webglProgram = program.program; state.uniformBlockBinding( uniformsGroup, webglProgram ); } function update( uniformsGroup, program ) { let buffer = buffers[ uniformsGroup.id ]; if ( buffer === undefined ) { prepareUniformsGroup( uniformsGroup ); buffer = createBuffer( uniformsGroup ); buffers[ uniformsGroup.id ] = buffer; uniformsGroup.addEventListener( 'dispose', onUniformsGroupsDispose ); } // ensure to update the binding points/block indices mapping for this program const webglProgram = program.program; state.updateUBOMapping( uniformsGroup, webglProgram ); // update UBO once per frame const frame = info.render.frame; if ( updateList[ uniformsGroup.id ] !== frame ) { updateBufferData( uniformsGroup ); updateList[ uniformsGroup.id ] = frame; } } function createBuffer( uniformsGroup ) { // the setup of an UBO is independent of a particular shader program but global const bindingPointIndex = allocateBindingPointIndex(); uniformsGroup.__bindingPointIndex = bindingPointIndex; const buffer = gl.createBuffer(); const size = uniformsGroup.__size; const usage = uniformsGroup.usage; gl.bindBuffer( gl.UNIFORM_BUFFER, buffer ); gl.bufferData( gl.UNIFORM_BUFFER, size, usage ); gl.bindBuffer( gl.UNIFORM_BUFFER, null ); gl.bindBufferBase( gl.UNIFORM_BUFFER, bindingPointIndex, buffer ); return buffer; } function allocateBindingPointIndex() { for ( let i = 0; i < maxBindingPoints; i ++ ) { if ( allocatedBindingPoints.indexOf( i ) === -1 ) { allocatedBindingPoints.push( i ); return i; } } console.error( 'THREE.WebGLRenderer: Maximum number of simultaneously usable uniforms groups reached.' 
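		/*
		 * Each uniforms group claims one global binding point for its lifetime, so
		 * applications typically share one group across materials rather than creating
		 * a group per object (illustrative sketch; `materialA` and `materialB` are the
		 * application's own ShaderMaterial instances):
		 *
		 *   const sharedGroup = new UniformsGroup();
		 *   sharedGroup.add( new Uniform( new Matrix4() ) ); // e.g. a shared view matrix
		 *
		 *   materialA.uniformsGroups = [ sharedGroup ];
		 *   materialB.uniformsGroups = [ sharedGroup ];
		 */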
); return 0; } function updateBufferData( uniformsGroup ) { const buffer = buffers[ uniformsGroup.id ]; const uniforms = uniformsGroup.uniforms; const cache = uniformsGroup.__cache; gl.bindBuffer( gl.UNIFORM_BUFFER, buffer ); for ( let i = 0, il = uniforms.length; i < il; i ++ ) { const uniformArray = Array.isArray( uniforms[ i ] ) ? uniforms[ i ] : [ uniforms[ i ] ]; for ( let j = 0, jl = uniformArray.length; j < jl; j ++ ) { const uniform = uniformArray[ j ]; if ( hasUniformChanged( uniform, i, j, cache ) === true ) { const offset = uniform.__offset; const values = Array.isArray( uniform.value ) ? uniform.value : [ uniform.value ]; let arrayOffset = 0; for ( let k = 0; k < values.length; k ++ ) { const value = values[ k ]; const info = getUniformSize( value ); // TODO add integer and struct support if ( typeof value === 'number' || typeof value === 'boolean' ) { uniform.__data[ 0 ] = value; gl.bufferSubData( gl.UNIFORM_BUFFER, offset + arrayOffset, uniform.__data ); } else if ( value.isMatrix3 ) { // manually converting 3x3 to 3x4 uniform.__data[ 0 ] = value.elements[ 0 ]; uniform.__data[ 1 ] = value.elements[ 1 ]; uniform.__data[ 2 ] = value.elements[ 2 ]; uniform.__data[ 3 ] = 0; uniform.__data[ 4 ] = value.elements[ 3 ]; uniform.__data[ 5 ] = value.elements[ 4 ]; uniform.__data[ 6 ] = value.elements[ 5 ]; uniform.__data[ 7 ] = 0; uniform.__data[ 8 ] = value.elements[ 6 ]; uniform.__data[ 9 ] = value.elements[ 7 ]; uniform.__data[ 10 ] = value.elements[ 8 ]; uniform.__data[ 11 ] = 0; } else { value.toArray( uniform.__data, arrayOffset ); arrayOffset += info.storage / Float32Array.BYTES_PER_ELEMENT; } } gl.bufferSubData( gl.UNIFORM_BUFFER, offset, uniform.__data ); } } } gl.bindBuffer( gl.UNIFORM_BUFFER, null ); } function hasUniformChanged( uniform, index, indexArray, cache ) { const value = uniform.value; const indexString = index + '_' + indexArray; if ( cache[ indexString ] === undefined ) { // cache entry does not exist so far if ( typeof value === 'number' || typeof value === 'boolean' ) { cache[ indexString ] = value; } else { cache[ indexString ] = value.clone(); } return true; } else { const cachedObject = cache[ indexString ]; // compare current value with cached entry if ( typeof value === 'number' || typeof value === 'boolean' ) { if ( cachedObject !== value ) { cache[ indexString ] = value; return true; } } else { if ( cachedObject.equals( value ) === false ) { cachedObject.copy( value ); return true; } } } return false; } function prepareUniformsGroup( uniformsGroup ) { // determine total buffer size according to the STD140 layout // Hint: STD140 is the only supported layout in WebGL 2 const uniforms = uniformsGroup.uniforms; let offset = 0; // global buffer offset in bytes const chunkSize = 16; // size of a chunk in bytes for ( let i = 0, l = uniforms.length; i < l; i ++ ) { const uniformArray = Array.isArray( uniforms[ i ] ) ? uniforms[ i ] : [ uniforms[ i ] ]; for ( let j = 0, jl = uniformArray.length; j < jl; j ++ ) { const uniform = uniformArray[ j ]; const values = Array.isArray( uniform.value ) ? 
uniform.value : [ uniform.value ]; for ( let k = 0, kl = values.length; k < kl; k ++ ) { const value = values[ k ]; const info = getUniformSize( value ); const chunkOffset = offset % chunkSize; // offset in the current chunk const chunkPadding = chunkOffset % info.boundary; // required padding to match boundary const chunkStart = chunkOffset + chunkPadding; // the start position in the current chunk for the data offset += chunkPadding; // Check for chunk overflow if ( chunkStart !== 0 && ( chunkSize - chunkStart ) < info.storage ) { // Add padding and adjust offset offset += ( chunkSize - chunkStart ); } // the following two properties will be used for partial buffer updates uniform.__data = new Float32Array( info.storage / Float32Array.BYTES_PER_ELEMENT ); uniform.__offset = offset; // Update the global offset offset += info.storage; } } } // ensure correct final padding const chunkOffset = offset % chunkSize; if ( chunkOffset > 0 ) offset += ( chunkSize - chunkOffset ); // uniformsGroup.__size = offset; uniformsGroup.__cache = {}; return this; } function getUniformSize( value ) { const info = { boundary: 0, // bytes storage: 0 // bytes }; // determine sizes according to STD140 if ( typeof value === 'number' || typeof value === 'boolean' ) { // float/int/bool info.boundary = 4; info.storage = 4; } else if ( value.isVector2 ) { // vec2 info.boundary = 8; info.storage = 8; } else if ( value.isVector3 || value.isColor ) { // vec3 info.boundary = 16; info.storage = 12; // evil: vec3 must start on a 16-byte boundary but it only consumes 12 bytes } else if ( value.isVector4 ) { // vec4 info.boundary = 16; info.storage = 16; } else if ( value.isMatrix3 ) { // mat3 (in STD140 a 3x3 matrix is represented as 3x4) info.boundary = 48; info.storage = 48; } else if ( value.isMatrix4 ) { // mat4 info.boundary = 64; info.storage = 64; } else if ( value.isTexture ) { console.warn( 'THREE.WebGLRenderer: Texture samplers can not be part of an uniforms group.' ); } else { console.warn( 'THREE.WebGLRenderer: Unsupported uniform value type.', value ); } return info; } function onUniformsGroupsDispose( event ) { const uniformsGroup = event.target; uniformsGroup.removeEventListener( 'dispose', onUniformsGroupsDispose ); const index = allocatedBindingPoints.indexOf( uniformsGroup.__bindingPointIndex ); allocatedBindingPoints.splice( index, 1 ); gl.deleteBuffer( buffers[ uniformsGroup.id ] ); delete buffers[ uniformsGroup.id ]; delete updateList[ uniformsGroup.id ]; } function dispose() { for ( const id in buffers ) { gl.deleteBuffer( buffers[ id ] ); } allocatedBindingPoints = []; buffers = {}; updateList = {}; } return { bind: bind, update: update, dispose: dispose }; } /** * This renderer uses WebGL 2 to display scenes. * * WebGL 1 is not supported since `r163`. */ class WebGLRenderer { /** * Constructs a new WebGL renderer. * * @param {WebGLRenderer~Options} [parameters] - The configuration parameter. */ constructor( parameters = {} ) { const { canvas = createCanvasElement(), context = null, depth = true, stencil = false, alpha = false, antialias = false, premultipliedAlpha = true, preserveDrawingBuffer = false, powerPreference = 'default', failIfMajorPerformanceCaveat = false, reverseDepthBuffer = false, } = parameters; /** * This flag can be used for type testing. 
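		 *
		 * For example:
		 * ```js
		 * if ( renderer.isWebGLRenderer === true ) {
		 *
		 * 	// safe to use WebGL specific APIs such as renderer.getContext()
		 *
		 * }
		 * ```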
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isWebGLRenderer = true;

		let _alpha;

		if ( context !== null ) {

			if ( typeof WebGLRenderingContext !== 'undefined' && context instanceof WebGLRenderingContext ) {

				throw new Error( 'THREE.WebGLRenderer: WebGL 1 is not supported since r163.' );

			}

			_alpha = context.getContextAttributes().alpha;

		} else {

			_alpha = alpha;

		}

		const uintClearColor = new Uint32Array( 4 );
		const intClearColor = new Int32Array( 4 );

		let currentRenderList = null;
		let currentRenderState = null;

		// render() can be called from within a callback triggered by another render.
		// We track this so that the nested render call gets its list and state isolated from the parent render call.
		const renderListStack = [];
		const renderStateStack = [];

		// public properties

		/**
		 * A canvas where the renderer draws its output. This is automatically created by the renderer
		 * in the constructor (if not provided already); you just need to add it to your page like so:
		 * ```js
		 * document.body.appendChild( renderer.domElement );
		 * ```
		 *
		 * @type {DOMElement}
		 */
		this.domElement = canvas;

		/**
		 * An object with debug configuration settings.
		 *
		 * - `checkShaderErrors`: If `true`, material shader programs are
		 * checked for errors during the compilation and linking process. It may be useful to disable
		 * this check in production for a performance gain. It is strongly recommended to keep these
		 * checks enabled during development. If the shader does not compile and link, it will not
		 * work and the associated material will not render.
		 * - `onShaderError(gl, program, glVertexShader, glFragmentShader)`: A callback function that
		 * can be used for custom error reporting. The callback receives the WebGL context, an instance
		 * of WebGLProgram as well as two instances of WebGLShader representing the vertex and fragment shader.
		 * Assigning a custom function disables the default error reporting.
		 *
		 * @type {Object}
		 */
		this.debug = {

			/**
			 * Enables error checking and reporting when shader programs are being compiled.
			 * @type {boolean}
			 */
			checkShaderErrors: true,
			/**
			 * Callback for custom error reporting.
			 * @type {?Function}
			 */
			onShaderError: null
		};

		// clearing

		/**
		 * Whether the renderer should automatically clear its output before rendering a frame or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.autoClear = true;

		/**
		 * If {@link WebGLRenderer#autoClear} is set to `true`, whether the renderer should clear
		 * the color buffer or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.autoClearColor = true;

		/**
		 * If {@link WebGLRenderer#autoClear} is set to `true`, whether the renderer should clear
		 * the depth buffer or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.autoClearDepth = true;

		/**
		 * If {@link WebGLRenderer#autoClear} is set to `true`, whether the renderer should clear
		 * the stencil buffer or not.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.autoClearStencil = true;

		// scene graph

		/**
		 * Whether the renderer should sort objects or not.
		 *
		 * Note: Sorting is used to attempt to properly render objects that have some
		 * degree of transparency. By definition, sorting objects may not work in all
		 * cases. Depending on the needs of the application, it may be necessary to turn
		 * off sorting and use other methods to deal with transparency rendering, e.g.
		 * manually determining each object's rendering order.
		 *
		 * @type {boolean}
		 * @default true
		 */
		this.sortObjects = true;

		// user-defined clipping

		/**
		 * User-defined clipping planes specified in world space. These planes apply globally.
* Points in space whose dot product with the plane is negative are cut away. * * @type {Array} */ this.clippingPlanes = []; /** * Whether the renderer respects object-level clipping planes or not. * * @type {boolean} * @default false */ this.localClippingEnabled = false; // tone mapping /** * The tone mapping technique of the renderer. * * @type {(NoToneMapping|LinearToneMapping|ReinhardToneMapping|CineonToneMapping|ACESFilmicToneMapping|CustomToneMapping|AgXToneMapping|NeutralToneMapping)} * @default NoToneMapping */ this.toneMapping = NoToneMapping; /** * Exposure level of tone mapping. * * @type {number} * @default 1 */ this.toneMappingExposure = 1.0; // transmission /** * The normalized resolution scale for the transmission render target, measured in percentage * of viewport dimensions. Lowering this value can result in significant performance improvements * when using {@link MeshPhysicalMaterial#transmission}. * * @type {number} * @default 1 */ this.transmissionResolutionScale = 1.0; // internal properties const _this = this; let _isContextLost = false; // internal state cache this._outputColorSpace = SRGBColorSpace; let _currentActiveCubeFace = 0; let _currentActiveMipmapLevel = 0; let _currentRenderTarget = null; let _currentMaterialId = -1; let _currentCamera = null; const _currentViewport = new Vector4(); const _currentScissor = new Vector4(); let _currentScissorTest = null; const _currentClearColor = new Color( 0x000000 ); let _currentClearAlpha = 0; // let _width = canvas.width; let _height = canvas.height; let _pixelRatio = 1; let _opaqueSort = null; let _transparentSort = null; const _viewport = new Vector4( 0, 0, _width, _height ); const _scissor = new Vector4( 0, 0, _width, _height ); let _scissorTest = false; // frustum const _frustum = new Frustum(); // clipping let _clippingEnabled = false; let _localClippingEnabled = false; // camera matrices cache const _currentProjectionMatrix = new Matrix4(); const _projScreenMatrix = new Matrix4(); const _vector3 = new Vector3(); const _vector4 = new Vector4(); const _emptyScene = { background: null, fog: null, environment: null, overrideMaterial: null, isScene: true }; let _renderBackground = false; function getTargetPixelRatio() { return _currentRenderTarget === null ? _pixelRatio : 1; } // initialize let _gl = context; function getContext( contextName, contextAttributes ) { return canvas.getContext( contextName, contextAttributes ); } try { const contextAttributes = { alpha: true, depth, stencil, antialias, premultipliedAlpha, preserveDrawingBuffer, powerPreference, failIfMajorPerformanceCaveat, }; // OffscreenCanvas does not have setAttribute, see #22811 if ( 'setAttribute' in canvas ) canvas.setAttribute( 'data-engine', `three.js r${REVISION}` ); // event listeners must be registered before WebGL context is created, see #12753 canvas.addEventListener( 'webglcontextlost', onContextLost, false ); canvas.addEventListener( 'webglcontextrestored', onContextRestore, false ); canvas.addEventListener( 'webglcontextcreationerror', onContextCreationError, false ); if ( _gl === null ) { const contextName = 'webgl2'; _gl = getContext( contextName, contextAttributes ); if ( _gl === null ) { if ( getContext( contextName ) ) { throw new Error( 'Error creating WebGL context with your selected attributes.' ); } else { throw new Error( 'Error creating WebGL context.' 
); } } } } catch ( error ) { console.error( 'THREE.WebGLRenderer: ' + error.message ); throw error; } let extensions, capabilities, state, info; let properties, textures, cubemaps, cubeuvmaps, attributes, geometries, objects; let programCache, materials, renderLists, renderStates, clipping, shadowMap; let background, morphtargets, bufferRenderer, indexedBufferRenderer; let utils, bindingStates, uniformsGroups; function initGLContext() { extensions = new WebGLExtensions( _gl ); extensions.init(); utils = new WebGLUtils( _gl, extensions ); capabilities = new WebGLCapabilities( _gl, extensions, parameters, utils ); state = new WebGLState( _gl, extensions ); if ( capabilities.reverseDepthBuffer && reverseDepthBuffer ) { state.buffers.depth.setReversed( true ); } info = new WebGLInfo( _gl ); properties = new WebGLProperties(); textures = new WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info ); cubemaps = new WebGLCubeMaps( _this ); cubeuvmaps = new WebGLCubeUVMaps( _this ); attributes = new WebGLAttributes( _gl ); bindingStates = new WebGLBindingStates( _gl, attributes ); geometries = new WebGLGeometries( _gl, attributes, info, bindingStates ); objects = new WebGLObjects( _gl, geometries, attributes, info ); morphtargets = new WebGLMorphtargets( _gl, capabilities, textures ); clipping = new WebGLClipping( properties ); programCache = new WebGLPrograms( _this, cubemaps, cubeuvmaps, extensions, capabilities, bindingStates, clipping ); materials = new WebGLMaterials( _this, properties ); renderLists = new WebGLRenderLists(); renderStates = new WebGLRenderStates( extensions ); background = new WebGLBackground( _this, cubemaps, cubeuvmaps, state, objects, _alpha, premultipliedAlpha ); shadowMap = new WebGLShadowMap( _this, objects, capabilities ); uniformsGroups = new WebGLUniformsGroups( _gl, info, capabilities, state ); bufferRenderer = new WebGLBufferRenderer( _gl, extensions, info ); indexedBufferRenderer = new WebGLIndexedBufferRenderer( _gl, extensions, info ); info.programs = programCache.programs; /** * Holds details about the capabilities of the current rendering context. * * @name WebGLRenderer#capabilities * @type {WebGLRenderer~Capabilities} */ _this.capabilities = capabilities; /** * Provides methods for retrieving and testing WebGL extensions. * * - `get(extensionName:string)`: Used to check whether a WebGL extension is supported * and return the extension object if available. * - `has(extensionName:string)`: returns `true` if the extension is supported. * * @name WebGLRenderer#extensions * @type {Object} */ _this.extensions = extensions; /** * Used to track properties of other objects like native WebGL objects. * * @name WebGLRenderer#properties * @type {Object} */ _this.properties = properties; /** * Manages the render lists of the renderer. * * @name WebGLRenderer#renderLists * @type {Object} */ _this.renderLists = renderLists; /** * Interface for managing shadows. * * @name WebGLRenderer#shadowMap * @type {WebGLRenderer~ShadowMap} */ _this.shadowMap = shadowMap; /** * Interface for managing the WebGL state. * * @name WebGLRenderer#state * @type {Object} */ _this.state = state; /** * Holds a series of statistical information about the GPU memory * and the rendering process. Useful for debugging and monitoring. * * By default these data are reset at each render call but when having * multiple render passes per frame (e.g. when using post processing) it can * be preferred to reset with a custom pattern. First, set `autoReset` to * `false`. 
* ```js * renderer.info.autoReset = false; * ``` * Call `reset()` whenever you have finished rendering a single frame. * ```js * renderer.info.reset(); * ``` * * @name WebGLRenderer#info * @type {WebGLRenderer~Info} */ _this.info = info; } initGLContext(); // xr const xr = new WebXRManager( _this, _gl ); /** * A reference to the XR manager. * * @type {WebXRManager} */ this.xr = xr; /** * Returns the rendering context. * * @return {WebGL2RenderingContext} The rendering context. */ this.getContext = function () { return _gl; }; /** * Returns the rendering context attributes. * * @return {WebGLContextAttributes} The rendering context attributes. */ this.getContextAttributes = function () { return _gl.getContextAttributes(); }; /** * Simulates a loss of the WebGL context. This requires support for the `WEBGL_lose_context` extension. */ this.forceContextLoss = function () { const extension = extensions.get( 'WEBGL_lose_context' ); if ( extension ) extension.loseContext(); }; /** * Simulates a restore of the WebGL context. This requires support for the `WEBGL_lose_context` extension. */ this.forceContextRestore = function () { const extension = extensions.get( 'WEBGL_lose_context' ); if ( extension ) extension.restoreContext(); }; /** * Returns the pixel ratio. * * @return {number} The pixel ratio. */ this.getPixelRatio = function () { return _pixelRatio; }; /** * Sets the given pixel ratio and resizes the canvas if necessary. * * @param {number} value - The pixel ratio. */ this.setPixelRatio = function ( value ) { if ( value === undefined ) return; _pixelRatio = value; this.setSize( _width, _height, false ); }; /** * Returns the renderer's size in logical pixels. This method does not honor the pixel ratio. * * @param {Vector2} target - The method writes the result in this target object. * @return {Vector2} The renderer's size in logical pixels. */ this.getSize = function ( target ) { return target.set( _width, _height ); }; /** * Resizes the output canvas to (width, height) with device pixel ratio taken * into account, and also sets the viewport to fit that size, starting at (0, * 0). Setting `updateStyle` to false prevents any style changes to the output canvas. * * @param {number} width - The width in logical pixels. * @param {number} height - The height in logical pixels. * @param {boolean} [updateStyle=true] - Whether to update the `style` attribute of the canvas or not. */ this.setSize = function ( width, height, updateStyle = true ) { if ( xr.isPresenting ) { console.warn( 'THREE.WebGLRenderer: Can\'t change size while VR device is presenting.' ); return; } _width = width; _height = height; canvas.width = Math.floor( width * _pixelRatio ); canvas.height = Math.floor( height * _pixelRatio ); if ( updateStyle === true ) { canvas.style.width = width + 'px'; canvas.style.height = height + 'px'; } this.setViewport( 0, 0, width, height ); }; /** * Returns the drawing buffer size in physical pixels. This method honors the pixel ratio. * * @param {Vector2} target - The method writes the result in this target object. * @return {Vector2} The drawing buffer size. */ this.getDrawingBufferSize = function ( target ) { return target.set( _width * _pixelRatio, _height * _pixelRatio ).floor(); }; /** * This method allows you to define the drawing buffer size by specifying * width, height and pixel ratio all at once.
The size of the drawing * buffer is computed with this formula: * ```js * size.x = width * pixelRatio; * size.y = height * pixelRatio; * ``` * * @param {number} width - The width in logical pixels. * @param {number} height - The height in logical pixels. * @param {number} pixelRatio - The pixel ratio. */ this.setDrawingBufferSize = function ( width, height, pixelRatio ) { _width = width; _height = height; _pixelRatio = pixelRatio; canvas.width = Math.floor( width * pixelRatio ); canvas.height = Math.floor( height * pixelRatio ); this.setViewport( 0, 0, width, height ); }; /** * Returns the current viewport definition. * * @param {Vector2} target - The method writes the result in this target object. * @return {Vector2} The current viewport definition. */ this.getCurrentViewport = function ( target ) { return target.copy( _currentViewport ); }; /** * Returns the viewport definition. * * @param {Vector4} target - The method writes the result in this target object. * @return {Vector4} The viewport definition. */ this.getViewport = function ( target ) { return target.copy( _viewport ); }; /** * Sets the viewport to render from `(x, y)` to `(x + width, y + height)`. * * @param {number | Vector4} x - The horizontal coordinate for the lower left corner of the viewport origin in logical pixel unit. * Or alternatively a four-component vector specifying all the parameters of the viewport. * @param {number} y - The vertical coordinate for the lower left corner of the viewport origin in logical pixel unit. * @param {number} width - The width of the viewport in logical pixel unit. * @param {number} height - The height of the viewport in logical pixel unit. */ this.setViewport = function ( x, y, width, height ) { if ( x.isVector4 ) { _viewport.set( x.x, x.y, x.z, x.w ); } else { _viewport.set( x, y, width, height ); } state.viewport( _currentViewport.copy( _viewport ).multiplyScalar( _pixelRatio ).round() ); }; /** * Returns the scissor region. * * @param {Vector4} target - The method writes the result in this target object. * @return {Vector4} The scissor region. */ this.getScissor = function ( target ) { return target.copy( _scissor ); }; /** * Sets the scissor region to render from `(x, y)` to `(x + width, y + height)`. * * @param {number | Vector4} x - The horizontal coordinate for the lower left corner of the scissor region origin in logical pixel unit. * Or alternatively a four-component vector specifying all the parameters of the scissor region. * @param {number} y - The vertical coordinate for the lower left corner of the scissor region origin in logical pixel unit. * @param {number} width - The width of the scissor region in logical pixel unit. * @param {number} height - The height of the scissor region in logical pixel unit. */ this.setScissor = function ( x, y, width, height ) { if ( x.isVector4 ) { _scissor.set( x.x, x.y, x.z, x.w ); } else { _scissor.set( x, y, width, height ); } state.scissor( _currentScissor.copy( _scissor ).multiplyScalar( _pixelRatio ).round() ); }; /** * Returns `true` if the scissor test is enabled. * * @return {boolean} Whether the scissor test is enabled or not. */ this.getScissorTest = function () { return _scissorTest; }; /** * Enable or disable the scissor test. When this is enabled, only the pixels * within the defined scissor area will be affected by further renderer * actions. * * @param {boolean} boolean - Whether the scissor test is enabled or not. 
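* * For example, to restrict rendering to the lower-left quadrant of the canvas (a usage sketch; `width` and `height` stand for the canvas size in logical pixels): * ```js * renderer.setScissor( 0, 0, width / 2, height / 2 ); * renderer.setScissorTest( true ); * ```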
*/ this.setScissorTest = function ( boolean ) { state.setScissorTest( _scissorTest = boolean ); }; /** * Sets a custom opaque sort function for the render lists. Pass `null` * to use the default `painterSortStable` function. * * @param {?Function} method - The opaque sort function. */ this.setOpaqueSort = function ( method ) { _opaqueSort = method; }; /** * Sets a custom transparent sort function for the render lists. Pass `null` * to use the default `reversePainterSortStable` function. * * @param {?Function} method - The transparent sort function. */ this.setTransparentSort = function ( method ) { _transparentSort = method; }; // Clearing /** * Returns the clear color. * * @param {Color} target - The method writes the result in this target object. * @return {Color} The clear color. */ this.getClearColor = function ( target ) { return target.copy( background.getClearColor() ); }; /** * Sets the clear color and alpha. * * @param {Color} color - The clear color. * @param {number} [alpha=1] - The clear alpha. */ this.setClearColor = function () { background.setClearColor( ...arguments ); }; /** * Returns the clear alpha. Ranges within `[0,1]`. * * @return {number} The clear alpha. */ this.getClearAlpha = function () { return background.getClearAlpha(); }; /** * Sets the clear alpha. * * @param {number} alpha - The clear alpha. */ this.setClearAlpha = function () { background.setClearAlpha( ...arguments ); }; /** * Tells the renderer to clear its color, depth or stencil drawing buffer(s). * This method initializes the buffers to the current clear color values. * * @param {boolean} [color=true] - Whether the color buffer should be cleared or not. * @param {boolean} [depth=true] - Whether the depth buffer should be cleared or not. * @param {boolean} [stencil=true] - Whether the stencil buffer should be cleared or not. */ this.clear = function ( color = true, depth = true, stencil = true ) { let bits = 0; if ( color ) { // check if we're trying to clear an integer target let isIntegerFormat = false; if ( _currentRenderTarget !== null ) { const targetFormat = _currentRenderTarget.texture.format; isIntegerFormat = targetFormat === RGBAIntegerFormat || targetFormat === RGIntegerFormat || targetFormat === RedIntegerFormat; } // use the appropriate clear functions to clear the target if it's a signed // or unsigned integer target if ( isIntegerFormat ) { const targetType = _currentRenderTarget.texture.type; const isUnsignedType = targetType === UnsignedByteType || targetType === UnsignedIntType || targetType === UnsignedShortType || targetType === UnsignedInt248Type || targetType === UnsignedShort4444Type || targetType === UnsignedShort5551Type; const clearColor = background.getClearColor(); const a = background.getClearAlpha(); const r = clearColor.r; const g = clearColor.g; const b = clearColor.b; if ( isUnsignedType ) { uintClearColor[ 0 ] = r; uintClearColor[ 1 ] = g; uintClearColor[ 2 ] = b; uintClearColor[ 3 ] = a; _gl.clearBufferuiv( _gl.COLOR, 0, uintClearColor ); } else { intClearColor[ 0 ] = r; intClearColor[ 1 ] = g; intClearColor[ 2 ] = b; intClearColor[ 3 ] = a; _gl.clearBufferiv( _gl.COLOR, 0, intClearColor ); } } else { bits |= _gl.COLOR_BUFFER_BIT; } } if ( depth ) { bits |= _gl.DEPTH_BUFFER_BIT; } if ( stencil ) { bits |= _gl.STENCIL_BUFFER_BIT; this.state.buffers.stencil.setMask( 0xffffffff ); } _gl.clear( bits ); }; /** * Clears the color buffer. Equivalent to calling `renderer.clear( true, false, false )`.
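* * For example, when {@link WebGLRenderer#autoClear} is disabled, the buffers can be cleared manually before rendering, either all at once with `clear()` or individually with `clearColor()`, `clearDepth()` and `clearStencil()` (a usage sketch, assuming `scene` and `camera` exist): * ```js * renderer.autoClear = false; * renderer.clear(); // clears color, depth and stencil * renderer.render( scene, camera ); * ```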
*/ this.clearColor = function () { this.clear( true, false, false ); }; /** * Clears the depth buffer. Equivalent to calling `renderer.clear( false, true, false )`. */ this.clearDepth = function () { this.clear( false, true, false ); }; /** * Clears the stencil buffer. Equivalent to calling `renderer.clear( false, false, true )`. */ this.clearStencil = function () { this.clear( false, false, true ); }; /** * Frees the GPU-related resources allocated by this instance. Call this * method whenever this instance is no longer used in your app. */ this.dispose = function () { canvas.removeEventListener( 'webglcontextlost', onContextLost, false ); canvas.removeEventListener( 'webglcontextrestored', onContextRestore, false ); canvas.removeEventListener( 'webglcontextcreationerror', onContextCreationError, false ); background.dispose(); renderLists.dispose(); renderStates.dispose(); properties.dispose(); cubemaps.dispose(); cubeuvmaps.dispose(); objects.dispose(); bindingStates.dispose(); uniformsGroups.dispose(); programCache.dispose(); xr.dispose(); xr.removeEventListener( 'sessionstart', onXRSessionStart ); xr.removeEventListener( 'sessionend', onXRSessionEnd ); animation.stop(); }; // Events function onContextLost( event ) { event.preventDefault(); console.log( 'THREE.WebGLRenderer: Context Lost.' ); _isContextLost = true; } function onContextRestore( /* event */ ) { console.log( 'THREE.WebGLRenderer: Context Restored.' ); _isContextLost = false; const infoAutoReset = info.autoReset; const shadowMapEnabled = shadowMap.enabled; const shadowMapAutoUpdate = shadowMap.autoUpdate; const shadowMapNeedsUpdate = shadowMap.needsUpdate; const shadowMapType = shadowMap.type; initGLContext(); info.autoReset = infoAutoReset; shadowMap.enabled = shadowMapEnabled; shadowMap.autoUpdate = shadowMapAutoUpdate; shadowMap.needsUpdate = shadowMapNeedsUpdate; shadowMap.type = shadowMapType; } function onContextCreationError( event ) { console.error( 'THREE.WebGLRenderer: A WebGL context could not be created. 
Reason: ', event.statusMessage ); } function onMaterialDispose( event ) { const material = event.target; material.removeEventListener( 'dispose', onMaterialDispose ); deallocateMaterial( material ); } // Buffer deallocation function deallocateMaterial( material ) { releaseMaterialProgramReferences( material ); properties.remove( material ); } function releaseMaterialProgramReferences( material ) { const programs = properties.get( material ).programs; if ( programs !== undefined ) { programs.forEach( function ( program ) { programCache.releaseProgram( program ); } ); if ( material.isShaderMaterial ) { programCache.releaseShaderCache( material ); } } } // Buffer rendering this.renderBufferDirect = function ( camera, scene, geometry, material, object, group ) { if ( scene === null ) scene = _emptyScene; // renderBufferDirect second parameter used to be fog (could be null) const frontFaceCW = ( object.isMesh && object.matrixWorld.determinant() < 0 ); const program = setProgram( camera, scene, geometry, material, object ); state.setMaterial( material, frontFaceCW ); // let index = geometry.index; let rangeFactor = 1; if ( material.wireframe === true ) { index = geometries.getWireframeAttribute( geometry ); if ( index === undefined ) return; rangeFactor = 2; } // const drawRange = geometry.drawRange; const position = geometry.attributes.position; let drawStart = drawRange.start * rangeFactor; let drawEnd = ( drawRange.start + drawRange.count ) * rangeFactor; if ( group !== null ) { drawStart = Math.max( drawStart, group.start * rangeFactor ); drawEnd = Math.min( drawEnd, ( group.start + group.count ) * rangeFactor ); } if ( index !== null ) { drawStart = Math.max( drawStart, 0 ); drawEnd = Math.min( drawEnd, index.count ); } else if ( position !== undefined && position !== null ) { drawStart = Math.max( drawStart, 0 ); drawEnd = Math.min( drawEnd, position.count ); } const drawCount = drawEnd - drawStart; if ( drawCount < 0 || drawCount === Infinity ) return; // bindingStates.setup( object, material, program, geometry, index ); let attribute; let renderer = bufferRenderer; if ( index !== null ) { attribute = attributes.get( index ); renderer = indexedBufferRenderer; renderer.setIndex( attribute ); } // if ( object.isMesh ) { if ( material.wireframe === true ) { state.setLineWidth( material.wireframeLinewidth * getTargetPixelRatio() ); renderer.setMode( _gl.LINES ); } else { renderer.setMode( _gl.TRIANGLES ); } } else if ( object.isLine ) { let lineWidth = material.linewidth; if ( lineWidth === undefined ) lineWidth = 1; // Not using Line*Material state.setLineWidth( lineWidth * getTargetPixelRatio() ); if ( object.isLineSegments ) { renderer.setMode( _gl.LINES ); } else if ( object.isLineLoop ) { renderer.setMode( _gl.LINE_LOOP ); } else { renderer.setMode( _gl.LINE_STRIP ); } } else if ( object.isPoints ) { renderer.setMode( _gl.POINTS ); } else if ( object.isSprite ) { renderer.setMode( _gl.TRIANGLES ); } if ( object.isBatchedMesh ) { if ( object._multiDrawInstances !== null ) { // @deprecated, r174 warnOnce( 'THREE.WebGLRenderer: renderMultiDrawInstances has been deprecated and will be removed in r184. Append to renderMultiDraw arguments and use indirection.' ); renderer.renderMultiDrawInstances( object._multiDrawStarts, object._multiDrawCounts, object._multiDrawCount, object._multiDrawInstances ); } else { if ( ! 
extensions.get( 'WEBGL_multi_draw' ) ) { const starts = object._multiDrawStarts; const counts = object._multiDrawCounts; const drawCount = object._multiDrawCount; const bytesPerElement = index ? attributes.get( index ).bytesPerElement : 1; const uniforms = properties.get( material ).currentProgram.getUniforms(); for ( let i = 0; i < drawCount; i ++ ) { uniforms.setValue( _gl, '_gl_DrawID', i ); renderer.render( starts[ i ] / bytesPerElement, counts[ i ] ); } } else { renderer.renderMultiDraw( object._multiDrawStarts, object._multiDrawCounts, object._multiDrawCount ); } } } else if ( object.isInstancedMesh ) { renderer.renderInstances( drawStart, drawCount, object.count ); } else if ( geometry.isInstancedBufferGeometry ) { const maxInstanceCount = geometry._maxInstanceCount !== undefined ? geometry._maxInstanceCount : Infinity; const instanceCount = Math.min( geometry.instanceCount, maxInstanceCount ); renderer.renderInstances( drawStart, drawCount, instanceCount ); } else { renderer.render( drawStart, drawCount ); } }; // Compile function prepareMaterial( material, scene, object ) { if ( material.transparent === true && material.side === DoubleSide && material.forceSinglePass === false ) { material.side = BackSide; material.needsUpdate = true; getProgram( material, scene, object ); material.side = FrontSide; material.needsUpdate = true; getProgram( material, scene, object ); material.side = DoubleSide; } else { getProgram( material, scene, object ); } } /** * Compiles all materials in the scene with the camera. This is useful to precompile shaders * before the first rendering. If you want to add a 3D object to an existing scene, use the third * optional parameter for applying the target scene. * * Note that the (target) scene's lighting and environment must be configured before calling this method. * * @param {Object3D} scene - The scene or another type of 3D object to precompile. * @param {Camera} camera - The camera. * @param {?Scene} [targetScene=null] - The target scene. * @return {?Set} The precompiled materials. */ this.compile = function ( scene, camera, targetScene = null ) { if ( targetScene === null ) targetScene = scene; currentRenderState = renderStates.get( targetScene ); currentRenderState.init( camera ); renderStateStack.push( currentRenderState ); // gather lights from both the target scene and the new object that will be added to the scene. targetScene.traverseVisible( function ( object ) { if ( object.isLight && object.layers.test( camera.layers ) ) { currentRenderState.pushLight( object ); if ( object.castShadow ) { currentRenderState.pushShadow( object ); } } } ); if ( scene !== targetScene ) { scene.traverseVisible( function ( object ) { if ( object.isLight && object.layers.test( camera.layers ) ) { currentRenderState.pushLight( object ); if ( object.castShadow ) { currentRenderState.pushShadow( object ); } } } ); } currentRenderState.setupLights(); // Only initialize materials in the new scene, not the targetScene. const materials = new Set(); scene.traverse( function ( object ) { if ( ! 
( object.isMesh || object.isPoints || object.isLine || object.isSprite ) ) { return; } const material = object.material; if ( material ) { if ( Array.isArray( material ) ) { for ( let i = 0; i < material.length; i ++ ) { const material2 = material[ i ]; prepareMaterial( material2, targetScene, object ); materials.add( material2 ); } } else { prepareMaterial( material, targetScene, object ); materials.add( material ); } } } ); currentRenderState = renderStateStack.pop(); return materials; }; // compileAsync /** * Asynchronous version of {@link WebGLRenderer#compile}. * * This method makes use of the `KHR_parallel_shader_compile` WebGL extension. Hence, * it is recommended to use this version of `compile()` whenever possible. * * @async * @param {Object3D} scene - The scene or another type of 3D object to precompile. * @param {Camera} camera - The camera. * @param {?Scene} [targetScene=null] - The target scene. * @return {Promise} A Promise that resolves when the given scene can be rendered without unnecessary stalling due to shader compilation. */ this.compileAsync = function ( scene, camera, targetScene = null ) { const materials = this.compile( scene, camera, targetScene ); // Wait for all the materials in the new object to indicate that they're // ready to be used before resolving the promise. return new Promise( ( resolve ) => { function checkMaterialsReady() { materials.forEach( function ( material ) { const materialProperties = properties.get( material ); const program = materialProperties.currentProgram; if ( program.isReady() ) { // remove any programs that report they're ready to use from the list materials.delete( material ); } } ); // once the list of compiling materials is empty, call the callback if ( materials.size === 0 ) { resolve( scene ); return; } // if some materials are still not ready, wait a bit and check again setTimeout( checkMaterialsReady, 10 ); } if ( extensions.get( 'KHR_parallel_shader_compile' ) !== null ) { // If we can check the compilation status of the materials without // blocking then do so right away. checkMaterialsReady(); } else { // Otherwise start by waiting a bit to give the materials we just // initialized a chance to finish. setTimeout( checkMaterialsReady, 10 ); } } ); }; // Animation Loop let onAnimationFrameCallback = null; function onAnimationFrame( time ) { if ( onAnimationFrameCallback ) onAnimationFrameCallback( time ); } function onXRSessionStart() { animation.stop(); } function onXRSessionEnd() { animation.start(); } const animation = new WebGLAnimation(); animation.setAnimationLoop( onAnimationFrame ); if ( typeof self !== 'undefined' ) animation.setContext( self ); this.setAnimationLoop = function ( callback ) { onAnimationFrameCallback = callback; xr.setAnimationLoop( callback ); ( callback === null ) ? animation.stop() : animation.start(); }; xr.addEventListener( 'sessionstart', onXRSessionStart ); xr.addEventListener( 'sessionend', onXRSessionEnd ); // Rendering /** * Renders the given scene (or other type of 3D object) using the given camera. * * The render is done to a previously specified render target set by calling {@link WebGLRenderer#setRenderTarget} * or to the canvas as usual. * * By default render buffers are cleared before rendering but you can prevent * this by setting the property `autoClear` to `false`. If you want to prevent * only certain buffers being cleared you can `autoClearColor`, `autoClearDepth` * or `autoClearStencil` to `false`. To force a clear, use {@link WebGLRenderer#clear}. 
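* * A typical animation loop simply calls `render()` once per frame (a usage sketch, assuming `scene` and `camera` have been created elsewhere): * ```js * renderer.setAnimationLoop( function () { * 	renderer.render( scene, camera ); * } ); * ```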
* * @param {Object3D} scene - The scene to render. * @param {Camera} camera - The camera. */ this.render = function ( scene, camera ) { if ( camera !== undefined && camera.isCamera !== true ) { console.error( 'THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.' ); return; } if ( _isContextLost === true ) return; // update scene graph if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld(); // update camera matrices and frustum if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld(); if ( xr.enabled === true && xr.isPresenting === true ) { if ( xr.cameraAutoUpdate === true ) xr.updateCamera( camera ); camera = xr.getCamera(); // use XR camera for rendering } // if ( scene.isScene === true ) scene.onBeforeRender( _this, scene, camera, _currentRenderTarget ); currentRenderState = renderStates.get( scene, renderStateStack.length ); currentRenderState.init( camera ); renderStateStack.push( currentRenderState ); _projScreenMatrix.multiplyMatrices( camera.projectionMatrix, camera.matrixWorldInverse ); _frustum.setFromProjectionMatrix( _projScreenMatrix ); _localClippingEnabled = this.localClippingEnabled; _clippingEnabled = clipping.init( this.clippingPlanes, _localClippingEnabled ); currentRenderList = renderLists.get( scene, renderListStack.length ); currentRenderList.init(); renderListStack.push( currentRenderList ); if ( xr.enabled === true && xr.isPresenting === true ) { const depthSensingMesh = _this.xr.getDepthSensingMesh(); if ( depthSensingMesh !== null ) { projectObject( depthSensingMesh, camera, - Infinity, _this.sortObjects ); } } projectObject( scene, camera, 0, _this.sortObjects ); currentRenderList.finish(); if ( _this.sortObjects === true ) { currentRenderList.sort( _opaqueSort, _transparentSort ); } _renderBackground = xr.enabled === false || xr.isPresenting === false || xr.hasDepthSensing() === false; if ( _renderBackground ) { background.addToRenderList( currentRenderList, scene ); } // this.info.render.frame ++; if ( _clippingEnabled === true ) clipping.beginShadows(); const shadowsArray = currentRenderState.state.shadowsArray; shadowMap.render( shadowsArray, scene, camera ); if ( _clippingEnabled === true ) clipping.endShadows(); // if ( this.info.autoReset === true ) this.info.reset(); // render scene const opaqueObjects = currentRenderList.opaque; const transmissiveObjects = currentRenderList.transmissive; currentRenderState.setupLights(); if ( camera.isArrayCamera ) { const cameras = camera.cameras; if ( transmissiveObjects.length > 0 ) { for ( let i = 0, l = cameras.length; i < l; i ++ ) { const camera2 = cameras[ i ]; renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera2 ); } } if ( _renderBackground ) background.render( scene ); for ( let i = 0, l = cameras.length; i < l; i ++ ) { const camera2 = cameras[ i ]; renderScene( currentRenderList, scene, camera2, camera2.viewport ); } } else { if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera ); if ( _renderBackground ) background.render( scene ); renderScene( currentRenderList, scene, camera ); } // if ( _currentRenderTarget !== null && _currentActiveMipmapLevel === 0 ) { // resolve multisample renderbuffers to a single-sample texture if necessary textures.updateMultisampleRenderTarget( _currentRenderTarget ); // Generate mipmap if we're using any kind of mipmap filtering textures.updateRenderTargetMipmap( _currentRenderTarget ); } // if ( scene.isScene === true ) 
scene.onAfterRender( _this, scene, camera ); // _gl.finish(); bindingStates.resetDefaultState(); _currentMaterialId = -1; _currentCamera = null; renderStateStack.pop(); if ( renderStateStack.length > 0 ) { currentRenderState = renderStateStack[ renderStateStack.length - 1 ]; if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, currentRenderState.state.camera ); } else { currentRenderState = null; } renderListStack.pop(); if ( renderListStack.length > 0 ) { currentRenderList = renderListStack[ renderListStack.length - 1 ]; } else { currentRenderList = null; } }; function projectObject( object, camera, groupOrder, sortObjects ) { if ( object.visible === false ) return; const visible = object.layers.test( camera.layers ); if ( visible ) { if ( object.isGroup ) { groupOrder = object.renderOrder; } else if ( object.isLOD ) { if ( object.autoUpdate === true ) object.update( camera ); } else if ( object.isLight ) { currentRenderState.pushLight( object ); if ( object.castShadow ) { currentRenderState.pushShadow( object ); } } else if ( object.isSprite ) { if ( ! object.frustumCulled || _frustum.intersectsSprite( object ) ) { if ( sortObjects ) { _vector4.setFromMatrixPosition( object.matrixWorld ) .applyMatrix4( _projScreenMatrix ); } const geometry = objects.update( object ); const material = object.material; if ( material.visible ) { currentRenderList.push( object, geometry, material, groupOrder, _vector4.z, null ); } } } else if ( object.isMesh || object.isLine || object.isPoints ) { if ( ! object.frustumCulled || _frustum.intersectsObject( object ) ) { const geometry = objects.update( object ); const material = object.material; if ( sortObjects ) { if ( object.boundingSphere !== undefined ) { if ( object.boundingSphere === null ) object.computeBoundingSphere(); _vector4.copy( object.boundingSphere.center ); } else { if ( geometry.boundingSphere === null ) geometry.computeBoundingSphere(); _vector4.copy( geometry.boundingSphere.center ); } _vector4 .applyMatrix4( object.matrixWorld ) .applyMatrix4( _projScreenMatrix ); } if ( Array.isArray( material ) ) { const groups = geometry.groups; for ( let i = 0, l = groups.length; i < l; i ++ ) { const group = groups[ i ]; const groupMaterial = material[ group.materialIndex ]; if ( groupMaterial && groupMaterial.visible ) { currentRenderList.push( object, geometry, groupMaterial, groupOrder, _vector4.z, group ); } } } else if ( material.visible ) { currentRenderList.push( object, geometry, material, groupOrder, _vector4.z, null ); } } } } const children = object.children; for ( let i = 0, l = children.length; i < l; i ++ ) { projectObject( children[ i ], camera, groupOrder, sortObjects ); } } function renderScene( currentRenderList, scene, camera, viewport ) { const opaqueObjects = currentRenderList.opaque; const transmissiveObjects = currentRenderList.transmissive; const transparentObjects = currentRenderList.transparent; currentRenderState.setupLightsView( camera ); if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, camera ); if ( viewport ) state.viewport( _currentViewport.copy( viewport ) ); if ( opaqueObjects.length > 0 ) renderObjects( opaqueObjects, scene, camera ); if ( transmissiveObjects.length > 0 ) renderObjects( transmissiveObjects, scene, camera ); if ( transparentObjects.length > 0 ) renderObjects( transparentObjects, scene, camera ); // Ensure depth buffer writing is enabled so it can be cleared on next render state.buffers.depth.setTest( true ); state.buffers.depth.setMask( true ); 
state.buffers.color.setMask( true ); state.setPolygonOffset( false ); } function renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera ) { const overrideMaterial = scene.isScene === true ? scene.overrideMaterial : null; if ( overrideMaterial !== null ) { return; } if ( currentRenderState.state.transmissionRenderTarget[ camera.id ] === undefined ) { currentRenderState.state.transmissionRenderTarget[ camera.id ] = new WebGLRenderTarget( 1, 1, { generateMipmaps: true, type: ( extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ) ) ? HalfFloatType : UnsignedByteType, minFilter: LinearMipmapLinearFilter, samples: 4, stencilBuffer: stencil, resolveDepthBuffer: false, resolveStencilBuffer: false, colorSpace: ColorManagement.workingColorSpace, } ); // debug /* const geometry = new PlaneGeometry(); const material = new MeshBasicMaterial( { map: _transmissionRenderTarget.texture } ); const mesh = new Mesh( geometry, material ); scene.add( mesh ); */ } const transmissionRenderTarget = currentRenderState.state.transmissionRenderTarget[ camera.id ]; const activeViewport = camera.viewport || _currentViewport; transmissionRenderTarget.setSize( activeViewport.z * _this.transmissionResolutionScale, activeViewport.w * _this.transmissionResolutionScale ); // const currentRenderTarget = _this.getRenderTarget(); _this.setRenderTarget( transmissionRenderTarget ); _this.getClearColor( _currentClearColor ); _currentClearAlpha = _this.getClearAlpha(); if ( _currentClearAlpha < 1 ) _this.setClearColor( 0xffffff, 0.5 ); _this.clear(); if ( _renderBackground ) background.render( scene ); // Turn off the features which can affect the frag color for opaque objects pass. // Otherwise they are applied twice in opaque objects pass and transmission objects pass. const currentToneMapping = _this.toneMapping; _this.toneMapping = NoToneMapping; // Remove viewport from camera to avoid nested render calls resetting viewport to it (e.g Reflector). // Transmission render pass requires viewport to match the transmissionRenderTarget. 
const currentCameraViewport = camera.viewport; if ( camera.viewport !== undefined ) camera.viewport = undefined; currentRenderState.setupLightsView( camera ); if ( _clippingEnabled === true ) clipping.setGlobalState( _this.clippingPlanes, camera ); renderObjects( opaqueObjects, scene, camera ); textures.updateMultisampleRenderTarget( transmissionRenderTarget ); textures.updateRenderTargetMipmap( transmissionRenderTarget ); if ( extensions.has( 'WEBGL_multisampled_render_to_texture' ) === false ) { // see #28131 let renderTargetNeedsUpdate = false; for ( let i = 0, l = transmissiveObjects.length; i < l; i ++ ) { const renderItem = transmissiveObjects[ i ]; const object = renderItem.object; const geometry = renderItem.geometry; const material = renderItem.material; const group = renderItem.group; if ( material.side === DoubleSide && object.layers.test( camera.layers ) ) { const currentSide = material.side; material.side = BackSide; material.needsUpdate = true; renderObject( object, scene, camera, geometry, material, group ); material.side = currentSide; material.needsUpdate = true; renderTargetNeedsUpdate = true; } } if ( renderTargetNeedsUpdate === true ) { textures.updateMultisampleRenderTarget( transmissionRenderTarget ); textures.updateRenderTargetMipmap( transmissionRenderTarget ); } } _this.setRenderTarget( currentRenderTarget ); _this.setClearColor( _currentClearColor, _currentClearAlpha ); if ( currentCameraViewport !== undefined ) camera.viewport = currentCameraViewport; _this.toneMapping = currentToneMapping; } function renderObjects( renderList, scene, camera ) { const overrideMaterial = scene.isScene === true ? scene.overrideMaterial : null; for ( let i = 0, l = renderList.length; i < l; i ++ ) { const renderItem = renderList[ i ]; const object = renderItem.object; const geometry = renderItem.geometry; const group = renderItem.group; let material = renderItem.material; if ( material.allowOverride === true && overrideMaterial !== null ) { material = overrideMaterial; } if ( object.layers.test( camera.layers ) ) { renderObject( object, scene, camera, geometry, material, group ); } } } function renderObject( object, scene, camera, geometry, material, group ) { object.onBeforeRender( _this, scene, camera, geometry, material, group ); object.modelViewMatrix.multiplyMatrices( camera.matrixWorldInverse, object.matrixWorld ); object.normalMatrix.getNormalMatrix( object.modelViewMatrix ); material.onBeforeRender( _this, scene, camera, geometry, object, group ); if ( material.transparent === true && material.side === DoubleSide && material.forceSinglePass === false ) { material.side = BackSide; material.needsUpdate = true; _this.renderBufferDirect( camera, scene, geometry, material, object, group ); material.side = FrontSide; material.needsUpdate = true; _this.renderBufferDirect( camera, scene, geometry, material, object, group ); material.side = DoubleSide; } else { _this.renderBufferDirect( camera, scene, geometry, material, object, group ); } object.onAfterRender( _this, scene, camera, geometry, material, group ); } function getProgram( material, scene, object ) { if ( scene.isScene !== true ) scene = _emptyScene; // scene could be a Mesh, Line, Points, ... 
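// Derive the program parameters and cache key for this material from the current lights, shadows and scene state; a matching program is reused from the per-material cache below, otherwise a new one is compiled.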
const materialProperties = properties.get( material ); const lights = currentRenderState.state.lights; const shadowsArray = currentRenderState.state.shadowsArray; const lightsStateVersion = lights.state.version; const parameters = programCache.getParameters( material, lights.state, shadowsArray, scene, object ); const programCacheKey = programCache.getProgramCacheKey( parameters ); let programs = materialProperties.programs; // always update environment and fog - changing these trigger an getProgram call, but it's possible that the program doesn't change materialProperties.environment = material.isMeshStandardMaterial ? scene.environment : null; materialProperties.fog = scene.fog; materialProperties.envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || materialProperties.environment ); materialProperties.envMapRotation = ( materialProperties.environment !== null && material.envMap === null ) ? scene.environmentRotation : material.envMapRotation; if ( programs === undefined ) { // new material material.addEventListener( 'dispose', onMaterialDispose ); programs = new Map(); materialProperties.programs = programs; } let program = programs.get( programCacheKey ); if ( program !== undefined ) { // early out if program and light state is identical if ( materialProperties.currentProgram === program && materialProperties.lightsStateVersion === lightsStateVersion ) { updateCommonMaterialProperties( material, parameters ); return program; } } else { parameters.uniforms = programCache.getUniforms( material ); material.onBeforeCompile( parameters, _this ); program = programCache.acquireProgram( parameters, programCacheKey ); programs.set( programCacheKey, program ); materialProperties.uniforms = parameters.uniforms; } const uniforms = materialProperties.uniforms; if ( ( ! material.isShaderMaterial && ! 
material.isRawShaderMaterial ) || material.clipping === true ) { uniforms.clippingPlanes = clipping.uniform; } updateCommonMaterialProperties( material, parameters ); // store the light setup it was created for materialProperties.needsLights = materialNeedsLights( material ); materialProperties.lightsStateVersion = lightsStateVersion; if ( materialProperties.needsLights ) { // wire up the material to this renderer's lighting state uniforms.ambientLightColor.value = lights.state.ambient; uniforms.lightProbe.value = lights.state.probe; uniforms.directionalLights.value = lights.state.directional; uniforms.directionalLightShadows.value = lights.state.directionalShadow; uniforms.spotLights.value = lights.state.spot; uniforms.spotLightShadows.value = lights.state.spotShadow; uniforms.rectAreaLights.value = lights.state.rectArea; uniforms.ltc_1.value = lights.state.rectAreaLTC1; uniforms.ltc_2.value = lights.state.rectAreaLTC2; uniforms.pointLights.value = lights.state.point; uniforms.pointLightShadows.value = lights.state.pointShadow; uniforms.hemisphereLights.value = lights.state.hemi; uniforms.directionalShadowMap.value = lights.state.directionalShadowMap; uniforms.directionalShadowMatrix.value = lights.state.directionalShadowMatrix; uniforms.spotShadowMap.value = lights.state.spotShadowMap; uniforms.spotLightMatrix.value = lights.state.spotLightMatrix; uniforms.spotLightMap.value = lights.state.spotLightMap; uniforms.pointShadowMap.value = lights.state.pointShadowMap; uniforms.pointShadowMatrix.value = lights.state.pointShadowMatrix; // TODO (abelnation): add area lights shadow info to uniforms } materialProperties.currentProgram = program; materialProperties.uniformsList = null; return program; } function getUniformList( materialProperties ) { if ( materialProperties.uniformsList === null ) { const progUniforms = materialProperties.currentProgram.getUniforms(); materialProperties.uniformsList = WebGLUniforms.seqWithValue( progUniforms.seq, materialProperties.uniforms ); } return materialProperties.uniformsList; } function updateCommonMaterialProperties( material, parameters ) { const materialProperties = properties.get( material ); materialProperties.outputColorSpace = parameters.outputColorSpace; materialProperties.batching = parameters.batching; materialProperties.batchingColor = parameters.batchingColor; materialProperties.instancing = parameters.instancing; materialProperties.instancingColor = parameters.instancingColor; materialProperties.instancingMorph = parameters.instancingMorph; materialProperties.skinning = parameters.skinning; materialProperties.morphTargets = parameters.morphTargets; materialProperties.morphNormals = parameters.morphNormals; materialProperties.morphColors = parameters.morphColors; materialProperties.morphTargetsCount = parameters.morphTargetsCount; materialProperties.numClippingPlanes = parameters.numClippingPlanes; materialProperties.numIntersection = parameters.numClipIntersection; materialProperties.vertexAlphas = parameters.vertexAlphas; materialProperties.vertexTangents = parameters.vertexTangents; materialProperties.toneMapping = parameters.toneMapping; } function setProgram( camera, scene, geometry, material, object ) { if ( scene.isScene !== true ) scene = _emptyScene; // scene could be a Mesh, Line, Points, ... textures.resetTextureUnits(); const fog = scene.fog; const environment = material.isMeshStandardMaterial ? scene.environment : null; const colorSpace = ( _currentRenderTarget === null ) ? 
_this.outputColorSpace : ( _currentRenderTarget.isXRRenderTarget === true ? _currentRenderTarget.texture.colorSpace : LinearSRGBColorSpace ); const envMap = ( material.isMeshStandardMaterial ? cubeuvmaps : cubemaps ).get( material.envMap || environment ); const vertexAlphas = material.vertexColors === true && !! geometry.attributes.color && geometry.attributes.color.itemSize === 4; const vertexTangents = !! geometry.attributes.tangent && ( !! material.normalMap || material.anisotropy > 0 ); const morphTargets = !! geometry.morphAttributes.position; const morphNormals = !! geometry.morphAttributes.normal; const morphColors = !! geometry.morphAttributes.color; let toneMapping = NoToneMapping; if ( material.toneMapped ) { if ( _currentRenderTarget === null || _currentRenderTarget.isXRRenderTarget === true ) { toneMapping = _this.toneMapping; } } const morphAttribute = geometry.morphAttributes.position || geometry.morphAttributes.normal || geometry.morphAttributes.color; const morphTargetsCount = ( morphAttribute !== undefined ) ? morphAttribute.length : 0; const materialProperties = properties.get( material ); const lights = currentRenderState.state.lights; if ( _clippingEnabled === true ) { if ( _localClippingEnabled === true || camera !== _currentCamera ) { const useCache = camera === _currentCamera && material.id === _currentMaterialId; // we might want to call this function with some ClippingGroup // object instead of the material, once it becomes feasible // (#8465, #8379) clipping.setState( material, camera, useCache ); } } // let needsProgramChange = false; if ( material.version === materialProperties.__version ) { if ( materialProperties.needsLights && ( materialProperties.lightsStateVersion !== lights.state.version ) ) { needsProgramChange = true; } else if ( materialProperties.outputColorSpace !== colorSpace ) { needsProgramChange = true; } else if ( object.isBatchedMesh && materialProperties.batching === false ) { needsProgramChange = true; } else if ( ! object.isBatchedMesh && materialProperties.batching === true ) { needsProgramChange = true; } else if ( object.isBatchedMesh && materialProperties.batchingColor === true && object.colorTexture === null ) { needsProgramChange = true; } else if ( object.isBatchedMesh && materialProperties.batchingColor === false && object.colorTexture !== null ) { needsProgramChange = true; } else if ( object.isInstancedMesh && materialProperties.instancing === false ) { needsProgramChange = true; } else if ( ! object.isInstancedMesh && materialProperties.instancing === true ) { needsProgramChange = true; } else if ( object.isSkinnedMesh && materialProperties.skinning === false ) { needsProgramChange = true; } else if ( ! 
object.isSkinnedMesh && materialProperties.skinning === true ) { needsProgramChange = true; } else if ( object.isInstancedMesh && materialProperties.instancingColor === true && object.instanceColor === null ) { needsProgramChange = true; } else if ( object.isInstancedMesh && materialProperties.instancingColor === false && object.instanceColor !== null ) { needsProgramChange = true; } else if ( object.isInstancedMesh && materialProperties.instancingMorph === true && object.morphTexture === null ) { needsProgramChange = true; } else if ( object.isInstancedMesh && materialProperties.instancingMorph === false && object.morphTexture !== null ) { needsProgramChange = true; } else if ( materialProperties.envMap !== envMap ) { needsProgramChange = true; } else if ( material.fog === true && materialProperties.fog !== fog ) { needsProgramChange = true; } else if ( materialProperties.numClippingPlanes !== undefined && ( materialProperties.numClippingPlanes !== clipping.numPlanes || materialProperties.numIntersection !== clipping.numIntersection ) ) { needsProgramChange = true; } else if ( materialProperties.vertexAlphas !== vertexAlphas ) { needsProgramChange = true; } else if ( materialProperties.vertexTangents !== vertexTangents ) { needsProgramChange = true; } else if ( materialProperties.morphTargets !== morphTargets ) { needsProgramChange = true; } else if ( materialProperties.morphNormals !== morphNormals ) { needsProgramChange = true; } else if ( materialProperties.morphColors !== morphColors ) { needsProgramChange = true; } else if ( materialProperties.toneMapping !== toneMapping ) { needsProgramChange = true; } else if ( materialProperties.morphTargetsCount !== morphTargetsCount ) { needsProgramChange = true; } } else { needsProgramChange = true; materialProperties.__version = material.version; } // let program = materialProperties.currentProgram; if ( needsProgramChange === true ) { program = getProgram( material, scene, object ); } let refreshProgram = false; let refreshMaterial = false; let refreshLights = false; const p_uniforms = program.getUniforms(), m_uniforms = materialProperties.uniforms; if ( state.useProgram( program.program ) ) { refreshProgram = true; refreshMaterial = true; refreshLights = true; } if ( material.id !== _currentMaterialId ) { _currentMaterialId = material.id; refreshMaterial = true; } if ( refreshProgram || _currentCamera !== camera ) { // common camera uniforms const reverseDepthBuffer = state.buffers.depth.getReversed(); if ( reverseDepthBuffer ) { _currentProjectionMatrix.copy( camera.projectionMatrix ); toNormalizedProjectionMatrix( _currentProjectionMatrix ); toReversedProjectionMatrix( _currentProjectionMatrix ); p_uniforms.setValue( _gl, 'projectionMatrix', _currentProjectionMatrix ); } else { p_uniforms.setValue( _gl, 'projectionMatrix', camera.projectionMatrix ); } p_uniforms.setValue( _gl, 'viewMatrix', camera.matrixWorldInverse ); const uCamPos = p_uniforms.map.cameraPosition; if ( uCamPos !== undefined ) { uCamPos.setValue( _gl, _vector3.setFromMatrixPosition( camera.matrixWorld ) ); } if ( capabilities.logarithmicDepthBuffer ) { p_uniforms.setValue( _gl, 'logDepthBufFC', 2.0 / ( Math.log( camera.far + 1.0 ) / Math.LN2 ) ); } // consider moving isOrthographic to UniformLib and WebGLMaterials, see https://github.com/mrdoob/three.js/pull/26467#issuecomment-1645185067 if ( material.isMeshPhongMaterial || material.isMeshToonMaterial || material.isMeshLambertMaterial || material.isMeshBasicMaterial || material.isMeshStandardMaterial || 
material.isShaderMaterial ) { p_uniforms.setValue( _gl, 'isOrthographic', camera.isOrthographicCamera === true ); } if ( _currentCamera !== camera ) { _currentCamera = camera; // lighting uniforms depend on the camera so enforce an update // now, in case this material supports lights - or later, when // the next material that does gets activated: refreshMaterial = true; // set to true on material change refreshLights = true; // remains set until update done } } // skinning and morph target uniforms must be set even if material didn't change // auto-setting of texture unit for bone and morph texture must go before other textures // otherwise textures used for skinning and morphing can take over texture units reserved for other material textures if ( object.isSkinnedMesh ) { p_uniforms.setOptional( _gl, object, 'bindMatrix' ); p_uniforms.setOptional( _gl, object, 'bindMatrixInverse' ); const skeleton = object.skeleton; if ( skeleton ) { if ( skeleton.boneTexture === null ) skeleton.computeBoneTexture(); p_uniforms.setValue( _gl, 'boneTexture', skeleton.boneTexture, textures ); } } if ( object.isBatchedMesh ) { p_uniforms.setOptional( _gl, object, 'batchingTexture' ); p_uniforms.setValue( _gl, 'batchingTexture', object._matricesTexture, textures ); p_uniforms.setOptional( _gl, object, 'batchingIdTexture' ); p_uniforms.setValue( _gl, 'batchingIdTexture', object._indirectTexture, textures ); p_uniforms.setOptional( _gl, object, 'batchingColorTexture' ); if ( object._colorsTexture !== null ) { p_uniforms.setValue( _gl, 'batchingColorTexture', object._colorsTexture, textures ); } } const morphAttributes = geometry.morphAttributes; if ( morphAttributes.position !== undefined || morphAttributes.normal !== undefined || ( morphAttributes.color !== undefined ) ) { morphtargets.update( object, geometry, program ); } if ( refreshMaterial || materialProperties.receiveShadow !== object.receiveShadow ) { materialProperties.receiveShadow = object.receiveShadow; p_uniforms.setValue( _gl, 'receiveShadow', object.receiveShadow ); } // https://github.com/mrdoob/three.js/pull/24467#issuecomment-1209031512 if ( material.isMeshGouraudMaterial && material.envMap !== null ) { m_uniforms.envMap.value = envMap; m_uniforms.flipEnvMap.value = ( envMap.isCubeTexture && envMap.isRenderTargetTexture === false ) ? 
-1 : 1; } if ( material.isMeshStandardMaterial && material.envMap === null && scene.environment !== null ) { m_uniforms.envMapIntensity.value = scene.environmentIntensity; } if ( refreshMaterial ) { p_uniforms.setValue( _gl, 'toneMappingExposure', _this.toneMappingExposure ); if ( materialProperties.needsLights ) { // the current material requires lighting info // note: all lighting uniforms are always set correctly // they simply reference the renderer's state for their // values // // use the current material's .needsUpdate flags to set // the GL state when required markUniformsLightsNeedsUpdate( m_uniforms, refreshLights ); } // refresh uniforms common to several materials if ( fog && material.fog === true ) { materials.refreshFogUniforms( m_uniforms, fog ); } materials.refreshMaterialUniforms( m_uniforms, material, _pixelRatio, _height, currentRenderState.state.transmissionRenderTarget[ camera.id ] ); WebGLUniforms.upload( _gl, getUniformList( materialProperties ), m_uniforms, textures ); } if ( material.isShaderMaterial && material.uniformsNeedUpdate === true ) { WebGLUniforms.upload( _gl, getUniformList( materialProperties ), m_uniforms, textures ); material.uniformsNeedUpdate = false; } if ( material.isSpriteMaterial ) { p_uniforms.setValue( _gl, 'center', object.center ); } // common matrices p_uniforms.setValue( _gl, 'modelViewMatrix', object.modelViewMatrix ); p_uniforms.setValue( _gl, 'normalMatrix', object.normalMatrix ); p_uniforms.setValue( _gl, 'modelMatrix', object.matrixWorld ); // UBOs if ( material.isShaderMaterial || material.isRawShaderMaterial ) { const groups = material.uniformsGroups; for ( let i = 0, l = groups.length; i < l; i ++ ) { const group = groups[ i ]; uniformsGroups.update( group, program ); uniformsGroups.bind( group, program ); } } return program; } // If uniforms are marked as clean, they don't need to be loaded to the GPU. function markUniformsLightsNeedsUpdate( uniforms, value ) { uniforms.ambientLightColor.needsUpdate = value; uniforms.lightProbe.needsUpdate = value; uniforms.directionalLights.needsUpdate = value; uniforms.directionalLightShadows.needsUpdate = value; uniforms.pointLights.needsUpdate = value; uniforms.pointLightShadows.needsUpdate = value; uniforms.spotLights.needsUpdate = value; uniforms.spotLightShadows.needsUpdate = value; uniforms.rectAreaLights.needsUpdate = value; uniforms.hemisphereLights.needsUpdate = value; } function materialNeedsLights( material ) { return material.isMeshLambertMaterial || material.isMeshToonMaterial || material.isMeshPhongMaterial || material.isMeshStandardMaterial || material.isShadowMaterial || ( material.isShaderMaterial && material.lights === true ); } /** * Returns the active cube face. * * @return {number} The active cube face. */ this.getActiveCubeFace = function () { return _currentActiveCubeFace; }; /** * Returns the active mipmap level. * * @return {number} The active mipmap level. */ this.getActiveMipmapLevel = function () { return _currentActiveMipmapLevel; }; /** * Returns the active render target. * * @return {?WebGLRenderTarget} The active render target. Returns `null` if no render target * is currently set. 
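* * A render-to-texture round trip typically looks like this (a usage sketch; the render target, `scene` and `camera` are assumed to have been created elsewhere): * ```js * renderer.setRenderTarget( renderTarget ); * renderer.render( scene, camera ); * renderer.setRenderTarget( null ); // render to the canvas again * ```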
*/ this.getRenderTarget = function () { return _currentRenderTarget; }; this.setRenderTargetTextures = function ( renderTarget, colorTexture, depthTexture ) { const renderTargetProperties = properties.get( renderTarget ); renderTargetProperties.__autoAllocateDepthBuffer = renderTarget.resolveDepthBuffer === false; if ( renderTargetProperties.__autoAllocateDepthBuffer === false ) { // The multisample_render_to_texture extension doesn't work properly if there // are midframe flushes and an external depth buffer. Disable use of the extension. renderTargetProperties.__useRenderToTexture = false; } properties.get( renderTarget.texture ).__webglTexture = colorTexture; properties.get( renderTarget.depthTexture ).__webglTexture = renderTargetProperties.__autoAllocateDepthBuffer ? undefined : depthTexture; renderTargetProperties.__hasExternalTextures = true; }; this.setRenderTargetFramebuffer = function ( renderTarget, defaultFramebuffer ) { const renderTargetProperties = properties.get( renderTarget ); renderTargetProperties.__webglFramebuffer = defaultFramebuffer; renderTargetProperties.__useDefaultFramebuffer = defaultFramebuffer === undefined; }; const _scratchFrameBuffer = _gl.createFramebuffer(); /** * Sets the active render target. * * @param {?WebGLRenderTarget} renderTarget - The render target to set. When `null` is given, * the canvas is set as the active render target instead. * @param {number} [activeCubeFace=0] - The active cube face when using a cube render target. * Indicates the z layer to render into when using 3D or array render targets. * @param {number} [activeMipmapLevel=0] - The active mipmap level. */ this.setRenderTarget = function ( renderTarget, activeCubeFace = 0, activeMipmapLevel = 0 ) { _currentRenderTarget = renderTarget; _currentActiveCubeFace = activeCubeFace; _currentActiveMipmapLevel = activeMipmapLevel; let useDefaultFramebuffer = true; let framebuffer = null; let isCube = false; let isRenderTarget3D = false; if ( renderTarget ) { const renderTargetProperties = properties.get( renderTarget ); if ( renderTargetProperties.__useDefaultFramebuffer !== undefined ) { // We need to make sure to rebind the framebuffer. state.bindFramebuffer( _gl.FRAMEBUFFER, null ); useDefaultFramebuffer = false; } else if ( renderTargetProperties.__webglFramebuffer === undefined ) { textures.setupRenderTarget( renderTarget ); } else if ( renderTargetProperties.__hasExternalTextures ) { // Color and depth texture must be rebound in order for the swapchain to update. textures.rebindTextures( renderTarget, properties.get( renderTarget.texture ).__webglTexture, properties.get( renderTarget.depthTexture ).__webglTexture ); } else if ( renderTarget.depthBuffer ) { // check if the depth texture is already bound to the frame buffer and that it's been initialized const depthTexture = renderTarget.depthTexture; if ( renderTargetProperties.__boundDepthTexture !== depthTexture ) { // check if the depth texture is compatible if ( depthTexture !== null && properties.has( depthTexture ) && ( renderTarget.width !== depthTexture.image.width || renderTarget.height !== depthTexture.image.height ) ) { throw new Error( 'WebGLRenderTarget: Attached DepthTexture is initialized to the incorrect size.'
); } // Swap the depth buffer to the currently attached one textures.setupDepthRenderbuffer( renderTarget ); } } const texture = renderTarget.texture; if ( texture.isData3DTexture || texture.isDataArrayTexture || texture.isCompressedArrayTexture ) { isRenderTarget3D = true; } const __webglFramebuffer = properties.get( renderTarget ).__webglFramebuffer; if ( renderTarget.isWebGLCubeRenderTarget ) { if ( Array.isArray( __webglFramebuffer[ activeCubeFace ] ) ) { framebuffer = __webglFramebuffer[ activeCubeFace ][ activeMipmapLevel ]; } else { framebuffer = __webglFramebuffer[ activeCubeFace ]; } isCube = true; } else if ( ( renderTarget.samples > 0 ) && textures.useMultisampledRTT( renderTarget ) === false ) { framebuffer = properties.get( renderTarget ).__webglMultisampledFramebuffer; } else { if ( Array.isArray( __webglFramebuffer ) ) { framebuffer = __webglFramebuffer[ activeMipmapLevel ]; } else { framebuffer = __webglFramebuffer; } } _currentViewport.copy( renderTarget.viewport ); _currentScissor.copy( renderTarget.scissor ); _currentScissorTest = renderTarget.scissorTest; } else { _currentViewport.copy( _viewport ).multiplyScalar( _pixelRatio ).floor(); _currentScissor.copy( _scissor ).multiplyScalar( _pixelRatio ).floor(); _currentScissorTest = _scissorTest; } // Use a scratch frame buffer if rendering to a mip level to avoid depth buffers // being bound that are different sizes. if ( activeMipmapLevel !== 0 ) { framebuffer = _scratchFrameBuffer; } const framebufferBound = state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); if ( framebufferBound && useDefaultFramebuffer ) { state.drawBuffers( renderTarget, framebuffer ); } state.viewport( _currentViewport ); state.scissor( _currentScissor ); state.setScissorTest( _currentScissorTest ); if ( isCube ) { const textureProperties = properties.get( renderTarget.texture ); _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_CUBE_MAP_POSITIVE_X + activeCubeFace, textureProperties.__webglTexture, activeMipmapLevel ); } else if ( isRenderTarget3D ) { const textureProperties = properties.get( renderTarget.texture ); const layer = activeCubeFace; _gl.framebufferTextureLayer( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, textureProperties.__webglTexture, activeMipmapLevel, layer ); } else if ( renderTarget !== null && activeMipmapLevel !== 0 ) { // Only bind the frame buffer if we are using a scratch frame buffer to render to a mipmap. // If we rebind the texture when using a multi sample buffer then an error about inconsistent samples will be thrown. const textureProperties = properties.get( renderTarget.texture ); _gl.framebufferTexture2D( _gl.FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_2D, textureProperties.__webglTexture, activeMipmapLevel ); } _currentMaterialId = -1; // reset current material to ensure correct uniform bindings }; /** * Reads the pixel data from the given render target into the given buffer. * * @param {WebGLRenderTarget} renderTarget - The render target to read from. * @param {number} x - The `x` coordinate of the copy region's origin. * @param {number} y - The `y` coordinate of the copy region's origin. * @param {number} width - The width of the copy region. * @param {number} height - The height of the copy region. * @param {TypedArray} buffer - The result buffer. * @param {number} [activeCubeFaceIndex] - The active cube face index. */ this.readRenderTargetPixels = function ( renderTarget, x, y, width, height, buffer, activeCubeFaceIndex ) { if ( ! 
( renderTarget && renderTarget.isWebGLRenderTarget ) ) { console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.' ); return; } let framebuffer = properties.get( renderTarget ).__webglFramebuffer; if ( renderTarget.isWebGLCubeRenderTarget && activeCubeFaceIndex !== undefined ) { framebuffer = framebuffer[ activeCubeFaceIndex ]; } if ( framebuffer ) { state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); try { const texture = renderTarget.texture; const textureFormat = texture.format; const textureType = texture.type; if ( ! capabilities.textureFormatReadable( textureFormat ) ) { console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in RGBA or implementation defined format.' ); return; } if ( ! capabilities.textureTypeReadable( textureType ) ) { console.error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in UnsignedByteType or implementation defined type.' ); return; } // the following if statement ensures valid read requests (no out-of-bounds pixels, see #8604) if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) { _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), buffer ); } } finally { // restore framebuffer of current render target if necessary const framebuffer = ( _currentRenderTarget !== null ) ? properties.get( _currentRenderTarget ).__webglFramebuffer : null; state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); } } }; /** * Asynchronous, non-blocking version of {@link WebGLRenderer#readRenderTargetPixels}. * * It is recommended to use this version of `readRenderTargetPixels()` whenever possible. * * @async * @param {WebGLRenderTarget} renderTarget - The render target to read from. * @param {number} x - The `x` coordinate of the copy region's origin. * @param {number} y - The `y` coordinate of the copy region's origin. * @param {number} width - The width of the copy region. * @param {number} height - The height of the copy region. * @param {TypedArray} buffer - The result buffer. * @param {number} [activeCubeFaceIndex] - The active cube face index. * @return {Promise} A Promise that resolves when the read has been finished. The resolve provides the read data as a typed array. */ this.readRenderTargetPixelsAsync = async function ( renderTarget, x, y, width, height, buffer, activeCubeFaceIndex ) { if ( ! ( renderTarget && renderTarget.isWebGLRenderTarget ) ) { throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.' ); } let framebuffer = properties.get( renderTarget ).__webglFramebuffer; if ( renderTarget.isWebGLCubeRenderTarget && activeCubeFaceIndex !== undefined ) { framebuffer = framebuffer[ activeCubeFaceIndex ]; } if ( framebuffer ) { // the following if statement ensures valid read requests (no out-of-bounds pixels, see #8604) if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) { // set the active frame buffer to the one we want to read state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer ); const texture = renderTarget.texture; const textureFormat = texture.format; const textureType = texture.type; if ( ! capabilities.textureFormatReadable( textureFormat ) ) { throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in RGBA or implementation defined format.' ); } if ( ! 
capabilities.textureTypeReadable( textureType ) ) { throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in UnsignedByteType or implementation defined type.' ); } const glBuffer = _gl.createBuffer(); _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer ); _gl.bufferData( _gl.PIXEL_PACK_BUFFER, buffer.byteLength, _gl.STREAM_READ ); _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), 0 ); // reset the frame buffer to the currently set buffer before waiting const currFramebuffer = _currentRenderTarget !== null ? properties.get( _currentRenderTarget ).__webglFramebuffer : null; state.bindFramebuffer( _gl.FRAMEBUFFER, currFramebuffer ); // check if the commands have finished every 4 ms const sync = _gl.fenceSync( _gl.SYNC_GPU_COMMANDS_COMPLETE, 0 ); _gl.flush(); await probeAsync( _gl, sync, 4 ); // read the data and delete the buffer _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer ); _gl.getBufferSubData( _gl.PIXEL_PACK_BUFFER, 0, buffer ); _gl.deleteBuffer( glBuffer ); _gl.deleteSync( sync ); return buffer; } else { throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: requested read bounds are out of range.' ); } } }; /** * Copies pixels from the currently bound framebuffer into the given texture. * * @param {FramebufferTexture} texture - The texture. * @param {?Vector2} [position=null] - The start position of the copy operation. * @param {number} [level=0] - The mip level. The default represents the base mip. */ this.copyFramebufferToTexture = function ( texture, position = null, level = 0 ) { const levelScale = Math.pow( 2, - level ); const width = Math.floor( texture.image.width * levelScale ); const height = Math.floor( texture.image.height * levelScale ); const x = position !== null ? position.x : 0; const y = position !== null ? position.y : 0; textures.setTexture2D( texture, 0 ); _gl.copyTexSubImage2D( _gl.TEXTURE_2D, level, 0, 0, x, y, width, height ); state.unbindTexture(); }; const _srcFramebuffer = _gl.createFramebuffer(); const _dstFramebuffer = _gl.createFramebuffer(); /** * Copies data of the given source texture into a destination texture. * * When using render target textures as `srcTexture` and `dstTexture`, you must make sure both render targets are initialized * via {@link WebGLRenderer#initRenderTarget}. * * @param {Texture} srcTexture - The source texture. * @param {Texture} dstTexture - The destination texture. * @param {Box2|Box3} [srcRegion=null] - A bounding box which describes the source region. Can be two or three-dimensional. * @param {Vector2|Vector3} [dstPosition=null] - A vector that represents the origin of the destination region. Can be two or three-dimensional. * @param {number} [srcLevel=0] - The source mipmap level to copy. * @param {?number} [dstLevel=null] - The destination mipmap level. */ this.copyTextureToTexture = function ( srcTexture, dstTexture, srcRegion = null, dstPosition = null, srcLevel = 0, dstLevel = null ) { // support the previous signature with just a single dst mipmap level if ( dstLevel === null ) { if ( srcLevel !== 0 ) { // @deprecated, r171 warnOnce( 'WebGLRenderer: copyTextureToTexture function signature has changed to support src and dst mipmap levels.' ); dstLevel = srcLevel; srcLevel = 0; } else { dstLevel = 0; } } // gather the necessary dimensions to copy let width, height, depth, minX, minY, minZ; let dstX, dstY, dstZ; const image = srcTexture.isCompressedTexture ?
srcTexture.mipmaps[ dstLevel ] : srcTexture.image; if ( srcRegion !== null ) { width = srcRegion.max.x - srcRegion.min.x; height = srcRegion.max.y - srcRegion.min.y; depth = srcRegion.isBox3 ? srcRegion.max.z - srcRegion.min.z : 1; minX = srcRegion.min.x; minY = srcRegion.min.y; minZ = srcRegion.isBox3 ? srcRegion.min.z : 0; } else { const levelScale = Math.pow( 2, - srcLevel ); width = Math.floor( image.width * levelScale ); height = Math.floor( image.height * levelScale ); if ( srcTexture.isDataArrayTexture ) { depth = image.depth; } else if ( srcTexture.isData3DTexture ) { depth = Math.floor( image.depth * levelScale ); } else { depth = 1; } minX = 0; minY = 0; minZ = 0; } if ( dstPosition !== null ) { dstX = dstPosition.x; dstY = dstPosition.y; dstZ = dstPosition.z; } else { dstX = 0; dstY = 0; dstZ = 0; } // Set up the destination target const glFormat = utils.convert( dstTexture.format ); const glType = utils.convert( dstTexture.type ); let glTarget; if ( dstTexture.isData3DTexture ) { textures.setTexture3D( dstTexture, 0 ); glTarget = _gl.TEXTURE_3D; } else if ( dstTexture.isDataArrayTexture || dstTexture.isCompressedArrayTexture ) { textures.setTexture2DArray( dstTexture, 0 ); glTarget = _gl.TEXTURE_2D_ARRAY; } else { textures.setTexture2D( dstTexture, 0 ); glTarget = _gl.TEXTURE_2D; } _gl.pixelStorei( _gl.UNPACK_FLIP_Y_WEBGL, dstTexture.flipY ); _gl.pixelStorei( _gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, dstTexture.premultiplyAlpha ); _gl.pixelStorei( _gl.UNPACK_ALIGNMENT, dstTexture.unpackAlignment ); // used for copying data from cpu const currentUnpackRowLen = _gl.getParameter( _gl.UNPACK_ROW_LENGTH ); const currentUnpackImageHeight = _gl.getParameter( _gl.UNPACK_IMAGE_HEIGHT ); const currentUnpackSkipPixels = _gl.getParameter( _gl.UNPACK_SKIP_PIXELS ); const currentUnpackSkipRows = _gl.getParameter( _gl.UNPACK_SKIP_ROWS ); const currentUnpackSkipImages = _gl.getParameter( _gl.UNPACK_SKIP_IMAGES ); _gl.pixelStorei( _gl.UNPACK_ROW_LENGTH, image.width ); _gl.pixelStorei( _gl.UNPACK_IMAGE_HEIGHT, image.height ); _gl.pixelStorei( _gl.UNPACK_SKIP_PIXELS, minX ); _gl.pixelStorei( _gl.UNPACK_SKIP_ROWS, minY ); _gl.pixelStorei( _gl.UNPACK_SKIP_IMAGES, minZ ); // set up the src texture const isSrc3D = srcTexture.isDataArrayTexture || srcTexture.isData3DTexture; const isDst3D = dstTexture.isDataArrayTexture || dstTexture.isData3DTexture; if ( srcTexture.isDepthTexture ) { const srcTextureProperties = properties.get( srcTexture ); const dstTextureProperties = properties.get( dstTexture ); const srcRenderTargetProperties = properties.get( srcTextureProperties.__renderTarget ); const dstRenderTargetProperties = properties.get( dstTextureProperties.__renderTarget ); state.bindFramebuffer( _gl.READ_FRAMEBUFFER, srcRenderTargetProperties.__webglFramebuffer ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, dstRenderTargetProperties.__webglFramebuffer ); for ( let i = 0; i < depth; i ++ ) { // if the source or destination are a 3d target then a layer needs to be bound if ( isSrc3D ) { _gl.framebufferTextureLayer( _gl.READ_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, properties.get( srcTexture ).__webglTexture, srcLevel, minZ + i ); _gl.framebufferTextureLayer( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, properties.get( dstTexture ).__webglTexture, dstLevel, dstZ + i ); } _gl.blitFramebuffer( minX, minY, width, height, dstX, dstY, width, height, _gl.DEPTH_BUFFER_BIT, _gl.NEAREST ); } state.bindFramebuffer( _gl.READ_FRAMEBUFFER, null ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, null ); } else if ( srcLevel !== 
0 || srcTexture.isRenderTargetTexture || properties.has( srcTexture ) ) { // get the appropriate frame buffers const srcTextureProperties = properties.get( srcTexture ); const dstTextureProperties = properties.get( dstTexture ); // bind the frame buffer targets state.bindFramebuffer( _gl.READ_FRAMEBUFFER, _srcFramebuffer ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, _dstFramebuffer ); for ( let i = 0; i < depth; i ++ ) { // assign the correct layers and mip maps to the frame buffers if ( isSrc3D ) { _gl.framebufferTextureLayer( _gl.READ_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, srcTextureProperties.__webglTexture, srcLevel, minZ + i ); } else { _gl.framebufferTexture2D( _gl.READ_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_2D, srcTextureProperties.__webglTexture, srcLevel ); } if ( isDst3D ) { _gl.framebufferTextureLayer( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, dstTextureProperties.__webglTexture, dstLevel, dstZ + i ); } else { _gl.framebufferTexture2D( _gl.DRAW_FRAMEBUFFER, _gl.COLOR_ATTACHMENT0, _gl.TEXTURE_2D, dstTextureProperties.__webglTexture, dstLevel ); } // copy the data using the fastest function that can achieve the copy if ( srcLevel !== 0 ) { _gl.blitFramebuffer( minX, minY, width, height, dstX, dstY, width, height, _gl.COLOR_BUFFER_BIT, _gl.NEAREST ); } else if ( isDst3D ) { _gl.copyTexSubImage3D( glTarget, dstLevel, dstX, dstY, dstZ + i, minX, minY, width, height ); } else { _gl.copyTexSubImage2D( glTarget, dstLevel, dstX, dstY, minX, minY, width, height ); } } // unbind read, draw buffers state.bindFramebuffer( _gl.READ_FRAMEBUFFER, null ); state.bindFramebuffer( _gl.DRAW_FRAMEBUFFER, null ); } else { if ( isDst3D ) { // copy data into the 3d texture if ( srcTexture.isDataTexture || srcTexture.isData3DTexture ) { _gl.texSubImage3D( glTarget, dstLevel, dstX, dstY, dstZ, width, height, depth, glFormat, glType, image.data ); } else if ( dstTexture.isCompressedArrayTexture ) { _gl.compressedTexSubImage3D( glTarget, dstLevel, dstX, dstY, dstZ, width, height, depth, glFormat, image.data ); } else { _gl.texSubImage3D( glTarget, dstLevel, dstX, dstY, dstZ, width, height, depth, glFormat, glType, image ); } } else { // copy data into the 2d texture if ( srcTexture.isDataTexture ) { _gl.texSubImage2D( _gl.TEXTURE_2D, dstLevel, dstX, dstY, width, height, glFormat, glType, image.data ); } else if ( srcTexture.isCompressedTexture ) { _gl.compressedTexSubImage2D( _gl.TEXTURE_2D, dstLevel, dstX, dstY, image.width, image.height, glFormat, image.data ); } else { _gl.texSubImage2D( _gl.TEXTURE_2D, dstLevel, dstX, dstY, width, height, glFormat, glType, image ); } } } // reset values _gl.pixelStorei( _gl.UNPACK_ROW_LENGTH, currentUnpackRowLen ); _gl.pixelStorei( _gl.UNPACK_IMAGE_HEIGHT, currentUnpackImageHeight ); _gl.pixelStorei( _gl.UNPACK_SKIP_PIXELS, currentUnpackSkipPixels ); _gl.pixelStorei( _gl.UNPACK_SKIP_ROWS, currentUnpackSkipRows ); _gl.pixelStorei( _gl.UNPACK_SKIP_IMAGES, currentUnpackSkipImages ); // Generate mipmaps only when copying level 0 if ( dstLevel === 0 && dstTexture.generateMipmaps ) { _gl.generateMipmap( glTarget ); } state.unbindTexture(); }; this.copyTextureToTexture3D = function ( srcTexture, dstTexture, srcRegion = null, dstPosition = null, level = 0 ) { // @deprecated, r170 warnOnce( 'WebGLRenderer: copyTextureToTexture3D function has been deprecated. Use "copyTextureToTexture" instead.' ); return this.copyTextureToTexture( srcTexture, dstTexture, srcRegion, dstPosition, level ); }; /** * Initializes the given WebGLRenderTarget memory. 
Useful for initializing a render target so data * can be copied into it using {@link WebGLRenderer#copyTextureToTexture} before it has been * rendered to. * * @param {WebGLRenderTarget} target - The render target. */ this.initRenderTarget = function ( target ) { if ( properties.get( target ).__webglFramebuffer === undefined ) { textures.setupRenderTarget( target ); } }; /** * Initializes the given texture. Useful for preloading a texture rather than waiting until first * render (which can cause noticeable lags due to decode and GPU upload overhead). * * @param {Texture} texture - The texture. */ this.initTexture = function ( texture ) { if ( texture.isCubeTexture ) { textures.setTextureCube( texture, 0 ); } else if ( texture.isData3DTexture ) { textures.setTexture3D( texture, 0 ); } else if ( texture.isDataArrayTexture || texture.isCompressedArrayTexture ) { textures.setTexture2DArray( texture, 0 ); } else { textures.setTexture2D( texture, 0 ); } state.unbindTexture(); }; /** * Can be used to reset the internal WebGL state. This method is mostly * relevant for applications which share a single WebGL context across * multiple WebGL libraries. */ this.resetState = function () { _currentActiveCubeFace = 0; _currentActiveMipmapLevel = 0; _currentRenderTarget = null; state.reset(); bindingStates.reset(); }; if ( typeof __THREE_DEVTOOLS__ !== 'undefined' ) { __THREE_DEVTOOLS__.dispatchEvent( new CustomEvent( 'observe', { detail: this } ) ); } } /** * Defines the coordinate system of the renderer. * * In `WebGLRenderer`, the value is always `WebGLCoordinateSystem`. * * @type {WebGLCoordinateSystem|WebGPUCoordinateSystem} * @default WebGLCoordinateSystem * @readonly */ get coordinateSystem() { return WebGLCoordinateSystem; } /** * Defines the output color space of the renderer. 
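*
* For example, to switch from the default `SRGBColorSpace` to linear output (a minimal
* sketch, assuming an existing `renderer` and the library imported as `THREE`):
*
* @example
* renderer.outputColorSpace = THREE.LinearSRGBColorSpace;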
* * @type {SRGBColorSpace|LinearSRGBColorSpace} * @default SRGBColorSpace */ get outputColorSpace() { return this._outputColorSpace; } set outputColorSpace( colorSpace ) { this._outputColorSpace = colorSpace; const gl = this.getContext(); gl.drawingBufferColorSpace = ColorManagement._getDrawingBufferColorSpace( colorSpace ); gl.unpackColorSpace = ColorManagement._getUnpackColorSpace(); } } exports.ACESFilmicToneMapping = ACESFilmicToneMapping; exports.AddEquation = AddEquation; exports.AddOperation = AddOperation; exports.AdditiveAnimationBlendMode = AdditiveAnimationBlendMode; exports.AdditiveBlending = AdditiveBlending; exports.AgXToneMapping = AgXToneMapping; exports.AlphaFormat = AlphaFormat; exports.AlwaysCompare = AlwaysCompare; exports.AlwaysDepth = AlwaysDepth; exports.AlwaysStencilFunc = AlwaysStencilFunc; exports.AmbientLight = AmbientLight; exports.AnimationAction = AnimationAction; exports.AnimationClip = AnimationClip; exports.AnimationLoader = AnimationLoader; exports.AnimationMixer = AnimationMixer; exports.AnimationObjectGroup = AnimationObjectGroup; exports.AnimationUtils = AnimationUtils; exports.ArcCurve = ArcCurve; exports.ArrayCamera = ArrayCamera; exports.ArrowHelper = ArrowHelper; exports.AttachedBindMode = AttachedBindMode; exports.Audio = Audio; exports.AudioAnalyser = AudioAnalyser; exports.AudioContext = AudioContext; exports.AudioListener = AudioListener; exports.AudioLoader = AudioLoader; exports.AxesHelper = AxesHelper; exports.BackSide = BackSide; exports.BasicDepthPacking = BasicDepthPacking; exports.BasicShadowMap = BasicShadowMap; exports.BatchedMesh = BatchedMesh; exports.Bone = Bone; exports.BooleanKeyframeTrack = BooleanKeyframeTrack; exports.Box2 = Box2; exports.Box3 = Box3; exports.Box3Helper = Box3Helper; exports.BoxGeometry = BoxGeometry; exports.BoxHelper = BoxHelper; exports.BufferAttribute = BufferAttribute; exports.BufferGeometry = BufferGeometry; exports.BufferGeometryLoader = BufferGeometryLoader; exports.ByteType = ByteType; exports.Cache = Cache; exports.Camera = Camera; exports.CameraHelper = CameraHelper; exports.CanvasTexture = CanvasTexture; exports.CapsuleGeometry = CapsuleGeometry; exports.CatmullRomCurve3 = CatmullRomCurve3; exports.CineonToneMapping = CineonToneMapping; exports.CircleGeometry = CircleGeometry; exports.ClampToEdgeWrapping = ClampToEdgeWrapping; exports.Clock = Clock; exports.Color = Color; exports.ColorKeyframeTrack = ColorKeyframeTrack; exports.ColorManagement = ColorManagement; exports.CompressedArrayTexture = CompressedArrayTexture; exports.CompressedCubeTexture = CompressedCubeTexture; exports.CompressedTexture = CompressedTexture; exports.CompressedTextureLoader = CompressedTextureLoader; exports.ConeGeometry = ConeGeometry; exports.ConstantAlphaFactor = ConstantAlphaFactor; exports.ConstantColorFactor = ConstantColorFactor; exports.Controls = Controls; exports.CubeCamera = CubeCamera; exports.CubeReflectionMapping = CubeReflectionMapping; exports.CubeRefractionMapping = CubeRefractionMapping; exports.CubeTexture = CubeTexture; exports.CubeTextureLoader = CubeTextureLoader; exports.CubeUVReflectionMapping = CubeUVReflectionMapping; exports.CubicBezierCurve = CubicBezierCurve; exports.CubicBezierCurve3 = CubicBezierCurve3; exports.CubicInterpolant = CubicInterpolant; exports.CullFaceBack = CullFaceBack; exports.CullFaceFront = CullFaceFront; exports.CullFaceFrontBack = CullFaceFrontBack; exports.CullFaceNone = CullFaceNone; exports.Curve = Curve; exports.CurvePath = CurvePath; exports.CustomBlending = 
CustomBlending; exports.CustomToneMapping = CustomToneMapping; exports.CylinderGeometry = CylinderGeometry; exports.Cylindrical = Cylindrical; exports.Data3DTexture = Data3DTexture; exports.DataArrayTexture = DataArrayTexture; exports.DataTexture = DataTexture; exports.DataTextureLoader = DataTextureLoader; exports.DataUtils = DataUtils; exports.DecrementStencilOp = DecrementStencilOp; exports.DecrementWrapStencilOp = DecrementWrapStencilOp; exports.DefaultLoadingManager = DefaultLoadingManager; exports.DepthFormat = DepthFormat; exports.DepthStencilFormat = DepthStencilFormat; exports.DepthTexture = DepthTexture; exports.DetachedBindMode = DetachedBindMode; exports.DirectionalLight = DirectionalLight; exports.DirectionalLightHelper = DirectionalLightHelper; exports.DiscreteInterpolant = DiscreteInterpolant; exports.DodecahedronGeometry = DodecahedronGeometry; exports.DoubleSide = DoubleSide; exports.DstAlphaFactor = DstAlphaFactor; exports.DstColorFactor = DstColorFactor; exports.DynamicCopyUsage = DynamicCopyUsage; exports.DynamicDrawUsage = DynamicDrawUsage; exports.DynamicReadUsage = DynamicReadUsage; exports.EdgesGeometry = EdgesGeometry; exports.EllipseCurve = EllipseCurve; exports.EqualCompare = EqualCompare; exports.EqualDepth = EqualDepth; exports.EqualStencilFunc = EqualStencilFunc; exports.EquirectangularReflectionMapping = EquirectangularReflectionMapping; exports.EquirectangularRefractionMapping = EquirectangularRefractionMapping; exports.Euler = Euler; exports.EventDispatcher = EventDispatcher; exports.ExtrudeGeometry = ExtrudeGeometry; exports.FileLoader = FileLoader; exports.Float16BufferAttribute = Float16BufferAttribute; exports.Float32BufferAttribute = Float32BufferAttribute; exports.FloatType = FloatType; exports.Fog = Fog; exports.FogExp2 = FogExp2; exports.FramebufferTexture = FramebufferTexture; exports.FrontSide = FrontSide; exports.Frustum = Frustum; exports.GLBufferAttribute = GLBufferAttribute; exports.GLSL1 = GLSL1; exports.GLSL3 = GLSL3; exports.GreaterCompare = GreaterCompare; exports.GreaterDepth = GreaterDepth; exports.GreaterEqualCompare = GreaterEqualCompare; exports.GreaterEqualDepth = GreaterEqualDepth; exports.GreaterEqualStencilFunc = GreaterEqualStencilFunc; exports.GreaterStencilFunc = GreaterStencilFunc; exports.GridHelper = GridHelper; exports.Group = Group; exports.HalfFloatType = HalfFloatType; exports.HemisphereLight = HemisphereLight; exports.HemisphereLightHelper = HemisphereLightHelper; exports.IcosahedronGeometry = IcosahedronGeometry; exports.ImageBitmapLoader = ImageBitmapLoader; exports.ImageLoader = ImageLoader; exports.ImageUtils = ImageUtils; exports.IncrementStencilOp = IncrementStencilOp; exports.IncrementWrapStencilOp = IncrementWrapStencilOp; exports.InstancedBufferAttribute = InstancedBufferAttribute; exports.InstancedBufferGeometry = InstancedBufferGeometry; exports.InstancedInterleavedBuffer = InstancedInterleavedBuffer; exports.InstancedMesh = InstancedMesh; exports.Int16BufferAttribute = Int16BufferAttribute; exports.Int32BufferAttribute = Int32BufferAttribute; exports.Int8BufferAttribute = Int8BufferAttribute; exports.IntType = IntType; exports.InterleavedBuffer = InterleavedBuffer; exports.InterleavedBufferAttribute = InterleavedBufferAttribute; exports.Interpolant = Interpolant; exports.InterpolateDiscrete = InterpolateDiscrete; exports.InterpolateLinear = InterpolateLinear; exports.InterpolateSmooth = InterpolateSmooth; exports.InvertStencilOp = InvertStencilOp; exports.KeepStencilOp = KeepStencilOp; exports.KeyframeTrack 
= KeyframeTrack; exports.LOD = LOD; exports.LatheGeometry = LatheGeometry; exports.Layers = Layers; exports.LessCompare = LessCompare; exports.LessDepth = LessDepth; exports.LessEqualCompare = LessEqualCompare; exports.LessEqualDepth = LessEqualDepth; exports.LessEqualStencilFunc = LessEqualStencilFunc; exports.LessStencilFunc = LessStencilFunc; exports.Light = Light; exports.LightProbe = LightProbe; exports.Line = Line; exports.Line3 = Line3; exports.LineBasicMaterial = LineBasicMaterial; exports.LineCurve = LineCurve; exports.LineCurve3 = LineCurve3; exports.LineDashedMaterial = LineDashedMaterial; exports.LineLoop = LineLoop; exports.LineSegments = LineSegments; exports.LinearFilter = LinearFilter; exports.LinearInterpolant = LinearInterpolant; exports.LinearMipMapLinearFilter = LinearMipMapLinearFilter; exports.LinearMipMapNearestFilter = LinearMipMapNearestFilter; exports.LinearMipmapLinearFilter = LinearMipmapLinearFilter; exports.LinearMipmapNearestFilter = LinearMipmapNearestFilter; exports.LinearSRGBColorSpace = LinearSRGBColorSpace; exports.LinearToneMapping = LinearToneMapping; exports.LinearTransfer = LinearTransfer; exports.Loader = Loader; exports.LoaderUtils = LoaderUtils; exports.LoadingManager = LoadingManager; exports.LoopOnce = LoopOnce; exports.LoopPingPong = LoopPingPong; exports.LoopRepeat = LoopRepeat; exports.LuminanceAlphaFormat = LuminanceAlphaFormat; exports.LuminanceFormat = LuminanceFormat; exports.MOUSE = MOUSE; exports.Material = Material; exports.MaterialLoader = MaterialLoader; exports.MathUtils = MathUtils; exports.Matrix2 = Matrix2; exports.Matrix3 = Matrix3; exports.Matrix4 = Matrix4; exports.MaxEquation = MaxEquation; exports.Mesh = Mesh; exports.MeshBasicMaterial = MeshBasicMaterial; exports.MeshDepthMaterial = MeshDepthMaterial; exports.MeshDistanceMaterial = MeshDistanceMaterial; exports.MeshLambertMaterial = MeshLambertMaterial; exports.MeshMatcapMaterial = MeshMatcapMaterial; exports.MeshNormalMaterial = MeshNormalMaterial; exports.MeshPhongMaterial = MeshPhongMaterial; exports.MeshPhysicalMaterial = MeshPhysicalMaterial; exports.MeshStandardMaterial = MeshStandardMaterial; exports.MeshToonMaterial = MeshToonMaterial; exports.MinEquation = MinEquation; exports.MirroredRepeatWrapping = MirroredRepeatWrapping; exports.MixOperation = MixOperation; exports.MultiplyBlending = MultiplyBlending; exports.MultiplyOperation = MultiplyOperation; exports.NearestFilter = NearestFilter; exports.NearestMipMapLinearFilter = NearestMipMapLinearFilter; exports.NearestMipMapNearestFilter = NearestMipMapNearestFilter; exports.NearestMipmapLinearFilter = NearestMipmapLinearFilter; exports.NearestMipmapNearestFilter = NearestMipmapNearestFilter; exports.NeutralToneMapping = NeutralToneMapping; exports.NeverCompare = NeverCompare; exports.NeverDepth = NeverDepth; exports.NeverStencilFunc = NeverStencilFunc; exports.NoBlending = NoBlending; exports.NoColorSpace = NoColorSpace; exports.NoToneMapping = NoToneMapping; exports.NormalAnimationBlendMode = NormalAnimationBlendMode; exports.NormalBlending = NormalBlending; exports.NotEqualCompare = NotEqualCompare; exports.NotEqualDepth = NotEqualDepth; exports.NotEqualStencilFunc = NotEqualStencilFunc; exports.NumberKeyframeTrack = NumberKeyframeTrack; exports.Object3D = Object3D; exports.ObjectLoader = ObjectLoader; exports.ObjectSpaceNormalMap = ObjectSpaceNormalMap; exports.OctahedronGeometry = OctahedronGeometry; exports.OneFactor = OneFactor; exports.OneMinusConstantAlphaFactor = OneMinusConstantAlphaFactor; 
exports.OneMinusConstantColorFactor = OneMinusConstantColorFactor; exports.OneMinusDstAlphaFactor = OneMinusDstAlphaFactor; exports.OneMinusDstColorFactor = OneMinusDstColorFactor; exports.OneMinusSrcAlphaFactor = OneMinusSrcAlphaFactor; exports.OneMinusSrcColorFactor = OneMinusSrcColorFactor; exports.OrthographicCamera = OrthographicCamera; exports.PCFShadowMap = PCFShadowMap; exports.PCFSoftShadowMap = PCFSoftShadowMap; exports.PMREMGenerator = PMREMGenerator; exports.Path = Path; exports.PerspectiveCamera = PerspectiveCamera; exports.Plane = Plane; exports.PlaneGeometry = PlaneGeometry; exports.PlaneHelper = PlaneHelper; exports.PointLight = PointLight; exports.PointLightHelper = PointLightHelper; exports.Points = Points; exports.PointsMaterial = PointsMaterial; exports.PolarGridHelper = PolarGridHelper; exports.PolyhedronGeometry = PolyhedronGeometry; exports.PositionalAudio = PositionalAudio; exports.PropertyBinding = PropertyBinding; exports.PropertyMixer = PropertyMixer; exports.QuadraticBezierCurve = QuadraticBezierCurve; exports.QuadraticBezierCurve3 = QuadraticBezierCurve3; exports.Quaternion = Quaternion; exports.QuaternionKeyframeTrack = QuaternionKeyframeTrack; exports.QuaternionLinearInterpolant = QuaternionLinearInterpolant; exports.RED_GREEN_RGTC2_Format = RED_GREEN_RGTC2_Format; exports.RED_RGTC1_Format = RED_RGTC1_Format; exports.REVISION = REVISION; exports.RGBADepthPacking = RGBADepthPacking; exports.RGBAFormat = RGBAFormat; exports.RGBAIntegerFormat = RGBAIntegerFormat; exports.RGBA_ASTC_10x10_Format = RGBA_ASTC_10x10_Format; exports.RGBA_ASTC_10x5_Format = RGBA_ASTC_10x5_Format; exports.RGBA_ASTC_10x6_Format = RGBA_ASTC_10x6_Format; exports.RGBA_ASTC_10x8_Format = RGBA_ASTC_10x8_Format; exports.RGBA_ASTC_12x10_Format = RGBA_ASTC_12x10_Format; exports.RGBA_ASTC_12x12_Format = RGBA_ASTC_12x12_Format; exports.RGBA_ASTC_4x4_Format = RGBA_ASTC_4x4_Format; exports.RGBA_ASTC_5x4_Format = RGBA_ASTC_5x4_Format; exports.RGBA_ASTC_5x5_Format = RGBA_ASTC_5x5_Format; exports.RGBA_ASTC_6x5_Format = RGBA_ASTC_6x5_Format; exports.RGBA_ASTC_6x6_Format = RGBA_ASTC_6x6_Format; exports.RGBA_ASTC_8x5_Format = RGBA_ASTC_8x5_Format; exports.RGBA_ASTC_8x6_Format = RGBA_ASTC_8x6_Format; exports.RGBA_ASTC_8x8_Format = RGBA_ASTC_8x8_Format; exports.RGBA_BPTC_Format = RGBA_BPTC_Format; exports.RGBA_ETC2_EAC_Format = RGBA_ETC2_EAC_Format; exports.RGBA_PVRTC_2BPPV1_Format = RGBA_PVRTC_2BPPV1_Format; exports.RGBA_PVRTC_4BPPV1_Format = RGBA_PVRTC_4BPPV1_Format; exports.RGBA_S3TC_DXT1_Format = RGBA_S3TC_DXT1_Format; exports.RGBA_S3TC_DXT3_Format = RGBA_S3TC_DXT3_Format; exports.RGBA_S3TC_DXT5_Format = RGBA_S3TC_DXT5_Format; exports.RGBDepthPacking = RGBDepthPacking; exports.RGBFormat = RGBFormat; exports.RGBIntegerFormat = RGBIntegerFormat; exports.RGB_BPTC_SIGNED_Format = RGB_BPTC_SIGNED_Format; exports.RGB_BPTC_UNSIGNED_Format = RGB_BPTC_UNSIGNED_Format; exports.RGB_ETC1_Format = RGB_ETC1_Format; exports.RGB_ETC2_Format = RGB_ETC2_Format; exports.RGB_PVRTC_2BPPV1_Format = RGB_PVRTC_2BPPV1_Format; exports.RGB_PVRTC_4BPPV1_Format = RGB_PVRTC_4BPPV1_Format; exports.RGB_S3TC_DXT1_Format = RGB_S3TC_DXT1_Format; exports.RGDepthPacking = RGDepthPacking; exports.RGFormat = RGFormat; exports.RGIntegerFormat = RGIntegerFormat; exports.RawShaderMaterial = RawShaderMaterial; exports.Ray = Ray; exports.Raycaster = Raycaster; exports.RectAreaLight = RectAreaLight; exports.RedFormat = RedFormat; exports.RedIntegerFormat = RedIntegerFormat; exports.ReinhardToneMapping = ReinhardToneMapping; exports.RenderTarget = 
RenderTarget; exports.RenderTarget3D = RenderTarget3D; exports.RenderTargetArray = RenderTargetArray; exports.RepeatWrapping = RepeatWrapping; exports.ReplaceStencilOp = ReplaceStencilOp; exports.ReverseSubtractEquation = ReverseSubtractEquation; exports.RingGeometry = RingGeometry; exports.SIGNED_RED_GREEN_RGTC2_Format = SIGNED_RED_GREEN_RGTC2_Format; exports.SIGNED_RED_RGTC1_Format = SIGNED_RED_RGTC1_Format; exports.SRGBColorSpace = SRGBColorSpace; exports.SRGBTransfer = SRGBTransfer; exports.Scene = Scene; exports.ShaderChunk = ShaderChunk; exports.ShaderLib = ShaderLib; exports.ShaderMaterial = ShaderMaterial; exports.ShadowMaterial = ShadowMaterial; exports.Shape = Shape; exports.ShapeGeometry = ShapeGeometry; exports.ShapePath = ShapePath; exports.ShapeUtils = ShapeUtils; exports.ShortType = ShortType; exports.Skeleton = Skeleton; exports.SkeletonHelper = SkeletonHelper; exports.SkinnedMesh = SkinnedMesh; exports.Source = Source; exports.Sphere = Sphere; exports.SphereGeometry = SphereGeometry; exports.Spherical = Spherical; exports.SphericalHarmonics3 = SphericalHarmonics3; exports.SplineCurve = SplineCurve; exports.SpotLight = SpotLight; exports.SpotLightHelper = SpotLightHelper; exports.Sprite = Sprite; exports.SpriteMaterial = SpriteMaterial; exports.SrcAlphaFactor = SrcAlphaFactor; exports.SrcAlphaSaturateFactor = SrcAlphaSaturateFactor; exports.SrcColorFactor = SrcColorFactor; exports.StaticCopyUsage = StaticCopyUsage; exports.StaticDrawUsage = StaticDrawUsage; exports.StaticReadUsage = StaticReadUsage; exports.StereoCamera = StereoCamera; exports.StreamCopyUsage = StreamCopyUsage; exports.StreamDrawUsage = StreamDrawUsage; exports.StreamReadUsage = StreamReadUsage; exports.StringKeyframeTrack = StringKeyframeTrack; exports.SubtractEquation = SubtractEquation; exports.SubtractiveBlending = SubtractiveBlending; exports.TOUCH = TOUCH; exports.TangentSpaceNormalMap = TangentSpaceNormalMap; exports.TetrahedronGeometry = TetrahedronGeometry; exports.Texture = Texture; exports.TextureLoader = TextureLoader; exports.TextureUtils = TextureUtils; exports.TimestampQuery = TimestampQuery; exports.TorusGeometry = TorusGeometry; exports.TorusKnotGeometry = TorusKnotGeometry; exports.Triangle = Triangle; exports.TriangleFanDrawMode = TriangleFanDrawMode; exports.TriangleStripDrawMode = TriangleStripDrawMode; exports.TrianglesDrawMode = TrianglesDrawMode; exports.TubeGeometry = TubeGeometry; exports.UVMapping = UVMapping; exports.Uint16BufferAttribute = Uint16BufferAttribute; exports.Uint32BufferAttribute = Uint32BufferAttribute; exports.Uint8BufferAttribute = Uint8BufferAttribute; exports.Uint8ClampedBufferAttribute = Uint8ClampedBufferAttribute; exports.Uniform = Uniform; exports.UniformsGroup = UniformsGroup; exports.UniformsLib = UniformsLib; exports.UniformsUtils = UniformsUtils; exports.UnsignedByteType = UnsignedByteType; exports.UnsignedInt248Type = UnsignedInt248Type; exports.UnsignedInt5999Type = UnsignedInt5999Type; exports.UnsignedIntType = UnsignedIntType; exports.UnsignedShort4444Type = UnsignedShort4444Type; exports.UnsignedShort5551Type = UnsignedShort5551Type; exports.UnsignedShortType = UnsignedShortType; exports.VSMShadowMap = VSMShadowMap; exports.Vector2 = Vector2; exports.Vector3 = Vector3; exports.Vector4 = Vector4; exports.VectorKeyframeTrack = VectorKeyframeTrack; exports.VideoFrameTexture = VideoFrameTexture; exports.VideoTexture = VideoTexture; exports.WebGL3DRenderTarget = WebGL3DRenderTarget; exports.WebGLArrayRenderTarget = WebGLArrayRenderTarget; 
exports.WebGLCoordinateSystem = WebGLCoordinateSystem; exports.WebGLCubeRenderTarget = WebGLCubeRenderTarget; exports.WebGLRenderTarget = WebGLRenderTarget; exports.WebGLRenderer = WebGLRenderer; exports.WebGLUtils = WebGLUtils; exports.WebGPUCoordinateSystem = WebGPUCoordinateSystem; exports.WebXRController = WebXRController; exports.WireframeGeometry = WireframeGeometry; exports.WrapAroundEnding = WrapAroundEnding; exports.ZeroCurvatureEnding = ZeroCurvatureEnding; exports.ZeroFactor = ZeroFactor; exports.ZeroSlopeEnding = ZeroSlopeEnding; exports.ZeroStencilOp = ZeroStencilOp; exports.createCanvasElement = createCanvasElement;
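/*
 * Usage sketch for the render-target APIs documented above (illustrative only, not executed
 * by the library). It assumes an existing `renderer`, `scene` and `camera`, and that the
 * module is imported as `THREE`; the default render target texture (RGBAFormat /
 * UnsignedByteType) satisfies the readability checks in `readRenderTargetPixelsAsync()`.
 *
 * const target = new THREE.WebGLRenderTarget( 256, 256 );
 * renderer.setRenderTarget( target );
 * renderer.render( scene, camera );
 * renderer.setRenderTarget( null ); // back to the canvas
 *
 * const buffer = new Uint8Array( 256 * 256 * 4 ); // 4 bytes per RGBA pixel
 * renderer.readRenderTargetPixelsAsync( target, 0, 0, 256, 256, buffer ).then( ( data ) => {
 *
 * 	console.log( data[ 0 ], data[ 1 ], data[ 2 ], data[ 3 ] ); // RGBA of the pixel at ( 0, 0 )
 *
 * } );
 */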